diff --git a/.clang-format b/.clang-format index 3690ab91..7b42f0da 100644 --- a/.clang-format +++ b/.clang-format @@ -108,9 +108,6 @@ PenaltyBreakTemplateDeclaration: 10 PenaltyExcessCharacter: 1000000 PenaltyReturnTypeOnItsOwnLine: 200 PointerAlignment: Left -Cpp11BracedListStyle: true -PenaltyReturnTypeOnItsOwnLine: 200 -PointerAlignment: Left RawStringFormats: - Language: Cpp Delimiters: diff --git a/.gitignore b/.gitignore index aa240814..1d30d7a6 100644 --- a/.gitignore +++ b/.gitignore @@ -27,3 +27,4 @@ CTestTestfile.cmake generated rules.ninja *.a +**/_build \ No newline at end of file diff --git a/CHANGELOG.rst b/CHANGELOG.rst index 5a343d92..9894da0b 100644 --- a/CHANGELOG.rst +++ b/CHANGELOG.rst @@ -2,8 +2,143 @@ Changelog ========= +[unreleased] [0.11.0-dev] +========================= + +Important notes +--------------- + +* Dropped support for python3.7 +* Dropped support macOS 10.15 +* This will be the last release that supports Ubuntu 18.04. +* Moved all library level modules under ``ouster.sdk``, this includes ``ouster.client``, ``ouster.pcap`` + ``ouster.osf``. So the new access name will be ``ouster.sdk.client``, ``ouster.sdk.pcap`` and so on +* [BREAKING] many of the ``ouster-cli`` commands and arguments have changed (see below.) +* [BREAKING] moved ``configure_sensor`` method to ``ouster.sdk.sensor.util`` module +* [BREAKING] removed the ``pcap_to_osf`` method. + + +examples +-------- + +* Added a new ``async_client_example.cpp`` C++ example. + + +Python SDK +---------- + +* Add support for python 3.12, including wheels on pypi +* Updated VCPKG libraries to 2023.10.19 +* New ``ScanSource`` API: + * Added new ``MultiScanSource`` that supports streaming and manipulating LidarScan frames + from multiple concurrent LidarScan sources + * For non-live sources the ``MultiScanSource`` have the option to choose LidarScan(s) by index + or choose a subset of scans using slicing operation + * The ``MultiScanSource`` interface has the ability to fallback to ``ScanSource`` using the + ``single_source(sensor_idx)``, ``ScanSource`` interface yield a single LidarScan on iteration + rather than a List + * The ``ScanSource`` interface obtained via ``single_source`` method supports same indexing and + and slicing operations as the ``MultiScanSource`` + * Added a generic ``open_source`` that accepts sensor urls, or a path to a pcap recording + or an osf file + * Add explicit flag ``index`` to index unindexed osf files, if flag is set to ``True`` the osf file + will be indexed and the index will be saved to the file on first attempt + * Display a progress bar during index of pcap file or osf (if unindexed) +* Improved the robustness of the ``resolve_metadata`` method used to + automatically identify the sensor metadata associated with a PCAP source. +* [bugfix] SimpleViz complains about missing fields +* [bugfix] Gracefully handle failed sensor connection attempts with proper error reporting +* [bugfix] Fix assertion error when using viz stepping on a live sensor +* [bugfix] Scope MultiLidarViz imports to viz commands +* [bugfix] LidarScan yielded with improper header/status +* [bugfix] OSF ScanSource fields property doesn't report the actual fields +* Removed ``ouster.sdkx``, the ``open_source`` command is now part of ``ouster.sdk`` module +* The ``FLAGS`` field is always added to the list fields of any source type by default. In case of a + dual return lidar profile then a second ``FLAGS2`` will also be added. + + +mapping +------- + +* Updated SLAM API and examples. 
+* Added real time frame dropping capability to SLAM API. +* The ``ouster-mapping`` package now uses ``point-cloud-utils`` instead of ``open3d``. +* improved per-column pose accuracy, which is now based on the actual column timestamps + + +ouster-cli +---------- + +* Many commands can now be chained together, e.g. ``ouster-cli source slam viz``. +* New ``save`` command can output the result in a variety of formats. +* Added ``--ts`` option for specifying the timestamps to use when saving an OSF + file. Host packet receive time is the default, but not all scan sources have + this info. Lidar packet timestamps can be used as an alternative. +* Changed the output format of ``ouster-cli discover`` to include more information. +* Added JSON format output option to ``ouster-cli discover``. +* Added a flag to output sensor user data to ``ouster-cli discover``. +* Update the minimum required version of ``zeroconf``. +* Removed ``python-magic`` package from required dependencies. +* Made the output of ``ouster-cli source info`` much more + user-friendly. (``ouster-cli source dump`` gives old output.) +* [breaking] changed the argument format of the ``slice`` command. +* [breaking] removed the ``--legacy`` and ``--non-legacy`` flags. +* [breaking] removed the ``ouster-cli mapping``, ``ouster-cli osf``, + ``ouster-cli pcap``, and ``ouster-cli sensor`` commands. +* [bugfix] return a nonzero exit code on error. +* [bugfix] fix an error that occurred when setting the IMU port using the + ``-i`` option. + + +ouster_client +------------- + +* Added a new buffered UDP source implementation ``BufferedUDPSource``. +* The method ``version_of_string`` is marked as deprecated, use ``version_from_string`` + instead. +* Added a new method ``firmware_version_from_metadata`` which works across firmwares. +* Added support for return order configuration parameter. +* Added support for gyro and accelerometer FSR configuration parameters. +* [bugfix] ``mtp_init_client`` throws a bad optional access. +* [bugfix] properly handle 32-bit frame IDs from the + ``FUSA_RNG15_RFL8_NIR8_DUAL`` sensor UDP profile. + + +ouster_osf +---------- + +* [breaking] Greatly simplified OSF writer API with examples. +* [breaking] removed the ``to_native`` and ``from_native`` methods. +* Updated Doxygen API documentation for OSF C++ API. +* Removed support for the deprecated "standard" OSF file format. (The streaming + OSF format is still supported.) +* Added ``osf_file_modify_metadata`` that allows updating the sensor info + associated with each lidar stream in an OSF file. +* Warn the user if reading an empty or improperly indexed file. + + +ouster_viz +---------- +* Added scaled palettes for calibrated reflectivity. +* Distance rings can now be hidden by setting their thickness to zero. +* [bugfix] Fix some rendering issues with the distance rings. +* [bugfix] Fix potential flickering in Viz + + +Known issues +------------ + +* ouster-cli discover may not provide info for sensors using IPv6 link-local + networks on Python 3.8 or with older versions of zeroconf. +* ouster-cli when combining ``slice`` command with ``viz`` the program will + exit once iterate over the selected range of scans even when + the ``--on-eof`` option is set to ``loop``. + - workaround: to have ``viz`` loop over the selected range, first perform a + ``slice`` with ``save``, then playback the generated file. 
+ + [20231031] [0.10.0] -============ +=================== Important notes --------------- diff --git a/CMakeLists.txt b/CMakeLists.txt index 21c26907..e55c2cf5 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -16,14 +16,14 @@ include(VcpkgEnv) project(ouster_example VERSION 20231031) # generate version header -set(OusterSDK_VERSION_STRING 0.10.0) +set(OusterSDK_VERSION_STRING 0.11.0rc9) include(VersionGen) # ==== Options ==== option(CMAKE_POSITION_INDEPENDENT_CODE "Build position independent code." ON) option(BUILD_SHARED_LIBS "Build shared libraries." OFF) option(BUILD_PCAP "Build pcap utils." ON) -option(BUILD_OSF "Build Ouster OSF library." OFF) +option(BUILD_OSF "Build Ouster OSF library." ON) option(BUILD_VIZ "Build Ouster visualizer." ON) option(BUILD_TESTING "Build tests" OFF) option(BUILD_EXAMPLES "Build C++ examples" OFF) diff --git a/README.rst b/README.rst index e3da782b..b8c30032 100644 --- a/README.rst +++ b/README.rst @@ -9,9 +9,10 @@ Ouster SDK - libraries and tools for Ouster Lidar Sensors Cross-platform C++/Python Ouster Sensor Development Toolkit To get started with our sensors, client, and visualizer, please see our SDK and sensor documentation: +ouster-sdk/index.html - `Ouster SDK Documentation `_ -- `Ouster Sensor Documentaion `_ +- `Ouster Sensor Public Documentaion `_ This repository contains Ouster SDK source code for connecting to and configuring ouster sensors, reading and visualizing data. diff --git a/cmake/Coverage.cmake b/cmake/Coverage.cmake new file mode 100644 index 00000000..332ab7d6 --- /dev/null +++ b/cmake/Coverage.cmake @@ -0,0 +1,9 @@ +# Cmake Functions For Code Coverage + +FUNCTION(CodeCoverageFunctionality target) + if(DEFINED ENV{CMAKE_COVERAGE_TESTS} AND "$ENV{CMAKE_COVERAGE_TESTS}" MATCHES "true") + message(STATUS "Code Coverage Enabled: Target: ${target}") + target_link_libraries(${target} PRIVATE gcov) + target_compile_options(${target} PRIVATE -O0 --coverage -g) + endif() +ENDFUNCTION() diff --git a/docs/Doxyfile b/docs/Doxyfile index d5e03aa1..d279d7c6 100644 --- a/docs/Doxyfile +++ b/docs/Doxyfile @@ -586,13 +586,13 @@ HIDE_COMPOUND_REFERENCE= NO # will show which file needs to be included to use the class. # The default value is: YES. -SHOW_HEADERFILE = YES +#SHOW_HEADERFILE = YES # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. -SHOW_INCLUDE_FILES = YES +#SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader @@ -804,7 +804,7 @@ WARN_IF_DOC_ERROR = YES # parameters have no documentation without warning. # The default value is: YES. -WARN_IF_INCOMPLETE_DOC = YES +#WARN_IF_INCOMPLETE_DOC = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return @@ -830,7 +830,7 @@ WARN_AS_ERROR = NO # messages should be written. If left blank the output is written to standard # error (stderr). -WARN_LOGFILE = +WARN_LOGFILE = "$warn_log_file" #--------------------------------------------------------------------------- # Configuration options related to the input files @@ -845,6 +845,7 @@ WARN_LOGFILE = INPUT = ../ouster_client \ ../ouster_pcap \ ../ouster_viz \ + ../ouster_osf \ # This tag can be used to specify the character encoding of the source files # that doxygen parses. 
Internally doxygen uses the UTF-8 encoding. Doxygen uses @@ -1532,7 +1533,7 @@ GENERATE_TREEVIEW = NO # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. -FULL_SIDEBAR = NO +#FULL_SIDEBAR = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. @@ -1615,7 +1616,7 @@ USE_MATHJAX = NO # The default value is: MathJax_2. # This tag requires that the tag USE_MATHJAX is set to YES. -MATHJAX_VERSION = MathJax_2 +#MATHJAX_VERSION = MathJax_2 # When MathJax is enabled you can set the default output format to be used for # the MathJax output. For more details about the output format see MathJax @@ -2278,7 +2279,7 @@ EXTERNAL_PAGES = YES # powerful graphs. # The default value is: YES. -CLASS_DIAGRAMS = YES +CLASS_DIAGRAMS = NO # You can include diagrams made with dia in doxygen documentation. Doxygen will # then run dia to produce the diagram and insert it in the documentation. The @@ -2300,7 +2301,7 @@ HIDE_UNDOC_RELATIONS = YES # set to NO # The default value is: NO. -HAVE_DOT = NO +HAVE_DOT = YES # The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed # to run in parallel. When set to 0 doxygen will base this on the number of @@ -2342,7 +2343,7 @@ DOT_FONTPATH = # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. -CLASS_GRAPH = YES +CLASS_GRAPH = NO # If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a # graph for each documented class showing the direct and indirect implementation @@ -2351,14 +2352,14 @@ CLASS_GRAPH = YES # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. -COLLABORATION_GRAPH = YES +COLLABORATION_GRAPH = NO # If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for # groups, showing the direct groups dependencies. # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. -GROUP_GRAPHS = YES +GROUP_GRAPHS = NO # If the UML_LOOK tag is set to YES, doxygen will generate inheritance and # collaboration diagrams in a style similar to the OMG's Unified Modeling @@ -2458,7 +2459,7 @@ CALLER_GRAPH = NO # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. -GRAPHICAL_HIERARCHY = YES +GRAPHICAL_HIERARCHY = NO # If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the # dependencies a directory has on other directories in a graphical way. The @@ -2467,7 +2468,7 @@ GRAPHICAL_HIERARCHY = YES # The default value is: YES. # This tag requires that the tag HAVE_DOT is set to YES. -DIRECTORY_GRAPH = YES +DIRECTORY_GRAPH = NO # The DOT_IMAGE_FORMAT tag can be used to set the image format of the images # generated by dot. For an explanation of the image formats see the section @@ -2599,4 +2600,4 @@ GENERATE_LEGEND = YES # files. # The default value is: YES. -DOT_CLEANUP = YES +DOT_CLEANUP = NO diff --git a/docs/_templates/versions.html b/docs/_templates/versions.html new file mode 100644 index 00000000..0825f254 --- /dev/null +++ b/docs/_templates/versions.html @@ -0,0 +1,17 @@ +
+ + Version: {{ current_version }} + + +
+ {% if versions %} +
+
{{ _('Versions') }}
+ {% for version, url in versions %} +
{{ version }}
+ {% endfor %} +
+ {% endif %} +
+
+
\ No newline at end of file diff --git a/docs/cli/mapping-sessions.rst b/docs/cli/mapping-sessions.rst index 6dfa9a42..fc1304ac 100644 --- a/docs/cli/mapping-sessions.rst +++ b/docs/cli/mapping-sessions.rst @@ -1,9 +1,12 @@ -Start mapping with the ouster-cli +Start mapping with the Ouster-CLI ================================= +.. _ouster-cli-mapping: + Installation ------------ + The Ouster CLI mapping functionality is a part of the Ouster SDK Python package. @@ -31,17 +34,19 @@ following command: .. code:: bash - ouster-cli source HOSTNAME / FILENAME --help + ouster-cli source / --help -Currently, there are two main commands: ``slam`` and ``convert``. You can further explore each +Currently, there are two main commands: ``slam`` and ``save``. You can further explore each command by accessing their respective submenus using the ``--help`` flag. For example: .. code:: bash - ouster-cli source HOSTNAME / FILENAME slam --help + ouster-cli source / slam --help + + +SLAM Command +------------ -SLAM ----- Simultaneous localization and mapping (SLAM) is a technique that enables a system to construct a map of its surroundings while simultaneously determining its own position on that map. The Ouster SDK slam command writes lidar scans with per-column poses into an OSF file, an open-source @@ -59,36 +64,113 @@ Then execute the following command: .. code:: bash - ouster-cli source HOSTNAME / FILENAME slam viz -o sample.osf + ouster-cli source / slam viz -o sample.osf .. note:: Please replace with the corresponding hostname or IP of your sensor, and replace - with the actual file path and name of the pcap file. Similarly, make the + with the actual file path and name of the PCAP/OSF file. Similarly, make the necessary substitutions in the subsequent commands. The terminal will display details such as the output filename and the processing duration. The -output filename must have the .osf extension in order to be used by the convert command. +output filename must have the .osf extension in order to be used by the ``save`` command. + +You can adjust settings such as point size, color, switch between 2D images, and pause playback in the visualizer, among other options. More details can be found at the :ref:`Ouster Visualizer ` + + +Accumulated Scan in SLAM command visulizer +------------------------------------------ + +Within the Ouster Visualizer, there is a visualization feature known as **ScansAccumulator**. This functionality represents a continuation of efforts to visualize lidar data by incorporating SLAM-generated poses stored within the ``LidarScan.pose`` property." 
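+
+For readers who prefer the Python API, roughly the same accumulation can be driven programmatically. The
+snippet below is a minimal sketch only: ``source_path`` is a placeholder for a PCAP/OSF recording, and the
+parameter values are illustrative; it mirrors the ``ScansAccumulator`` usage shown in the SLAM Quickstart.
+
+.. code:: python
+
+   from functools import partial
+   from ouster.sdk import open_source
+   from ouster.viz import SimpleViz, ScansAccumulator
+   from ouster.mapping.slam import KissBackend
+
+   # open a pcap or osf recording (source_path is a placeholder)
+   scans = open_source(source_path, sensor_idx=0)
+   info = scans.metadata
+   slam = KissBackend(info, max_range=75, min_range=1, voxel_size=1.0)
+
+   # accumulate key frames and a sparse overall map while visualizing
+   scans_acc = ScansAccumulator(info,
+                                accum_max_num=20,
+                                accum_min_dist_num=1,
+                                map_enabled=True,
+                                map_select_ratio=0.001)
+
+   SimpleViz(info, scans_accum=scans_acc, rate=0.0).run(map(partial(slam.update), scans))
+
+The ``ouster-cli`` options described in the next sections expose the same accumulation controls from the command line.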
+ +Available view modes +~~~~~~~~~~~~~~~~~~~~~ + +There are three view modes of **ScansAccumulator** that may be enabled/disabled depending on +its parameters and the data that is passed through it: + + * **poses** (or **TRACK**), key ``8`` - all scan poses in a trajectory/path view (available only + if poses data is present in scans) + * **scan map** (or **MAP**), key ``7`` - overall map view with a select ratio of random points + from every scan (available for scans with/without poses) + * **scan accum** (or **ACCUM**), key ``6`` - accumulated *N* scans (key frames) that are picked + according to the parameters (available for scans with/without poses) + + +Key bindings +~~~~~~~~~~~~~ + +Keyboard controls available with **ScansAccumulator**: + + ============== ============================================================= + Key What it does + ============== ============================================================= + ``6`` Toggle scans accumulation view mode (ACCUM) + ``7`` Toggle overall map view mode (MAP) + ``8`` Toggle poses/trajectory view mode (TRACK) + ``k / K`` Cycle point cloud coloring mode of accumulated clouds or map + ``g / G`` Cycle point cloud color palette of accumulated clouds or map + ``j / J`` Increase/decrease point size of accumulated clouds or map + ============== ============================================================= + +Ouster CLI **ScansAccumulator** options: + + * ``--accum-num N`` - accumulate *N* scans (default: ``0``) + * ``--accum-every K`` - accumulate every *Kth* scan (default: ``1``) + * ``--accum-every-m M`` - accumulate a scan every *M* meters traveled (default: ``None``) + * ``--accum-map`` - enable the overall map accumulation, selecting some percentage of points from + every scan (default: disabled) + * ``--accum-map-ratio R`` - set *R* as the ratio of points to randomly select from every scan + (default: ``0.001`` (*0.1%*)) + +Dense accumulated clouds view (with every point of a scan) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -You can modify settings like point size, color, switching 2D images, pause playing in the -visualizer and display accumulated scans. More details can be found at the -:ref:`Ouster Visualizer ` and :ref:`Scans Accumulator ` +To obtain the densest view, use the ``--accum-num N --accum-every 1`` parameters, where ``N`` is the +number of clouds to accumulate (``N`` up to 100 is generally small enough to avoid slowing down the viz interface):: + ouster-cli source / slam viz --accum-num 20 -o sample.osf -Convert ------- -The convert command converts the SLAM-generated OSF file to a point cloud data file +and the dense accumulated clouds result: + +.. figure:: /images/scans_accum_dense_every.png + + Dense view of 20 accumulated scans during the ``slam viz`` run + + +Overall map view (with poses) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +One of the most frequent tasks is previewing the overall map. We can test this by using the generated OSF file, which was created with the above command and contains the SLAM-generated ``LidarScan.pose`` property. +:: + + ouster-cli source sample.osf viz --accum-num 20 \ + --accum-every 0 --accum-every-m 10.5 --accum-map -r 0 -e stop + + +Here is a preview example of the overall map generated from the accumulated scan results. By using the ``-e stop`` option, the visualizer stops once the replay process finishes, displaying the preview of the lidar trajectory: + +.. 
figure:: /images/scans_accum_map_all_scan.png + + Data fully replayed with map and accum enabled (last current scan is displayed here in grey + palette) + + +Save Command +------------ + +The ``save`` command can be used to convert the SLAM-generated OSF file to a point cloud data file format such as LAS (.las), PLY (.ply), or PCD (.pcd). The output file format depends on the -extension of the output filename. Let's use the OSF file generated from the SLAM command and convert -it to a PLY file: +extension of the output filename. Let's use the OSF file generated from the SLAM command +and convert it to a PLY file: .. code:: bash - ouster-cli source sample.osf convert output.ply + ouster-cli source sample.osf save output.ply -The convert command automatically splits and downsamples the trajectory-adjusted point cloud into -several files to prevent exporting a huge size file. The terminal will display details, and you will -see the following printout for each output file: +The ``save`` command automatically splits and downsamples the trajectory-adjusted point cloud into +several files to prevent exporting a huge size file. The terminal will display details, and you +will see the following printout for each output file: .. code:: bash @@ -106,11 +188,6 @@ You can use an open source software `CloudCompare`_ to import and view the gener data files. -.. code:: bash - - ouster-cli source sample.osf convert output.las - - .. _Networking Guide: https://static.ouster.dev/sensor-docs/image_route1/image_route3/networking_guide/networking_guide.html .. _CloudCompare: https://www.cloudcompare.org/ diff --git a/docs/cli/sample-sessions.rst b/docs/cli/sample-sessions.rst index 88fcea30..401ad7e5 100644 --- a/docs/cli/sample-sessions.rst +++ b/docs/cli/sample-sessions.rst @@ -46,19 +46,18 @@ Let's see what the sensor is seeing in a pretty visualizer: $ ouster-cli source viz -That looked nice! Let's record ten seconds of data to a pcap so we can view it on repeat! +That looked nice! Let's record some data to a pcap so we can view it on repeat! .. code:: bash - $ ouster-cli source record -s 10 + $ ouster-cli source save .pcap That should produce screen output that looks something like: .. code:: bash Connecting to - Recording for up to 10.0 seconds... - Wrote X GiB to ./OS--_.pcap + Saving PCAP file at ./OS--_.pcap Go ahead and look in the current directory for the named pcap file and associated metadata file. diff --git a/docs/conf.py b/docs/conf.py index b414ea34..652165be 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -1,4 +1,6 @@ -import sphinx_rtd_theme # noqa +import sphinx_rtd_theme # noqa +import os +import json # Configuration file for the Sphinx documentation builder. # @@ -27,6 +29,11 @@ project = 'Ouster Sensor SDK' copyright = '2022, Ouster, Inc.' author = 'Ouster SW' +# -- Project variables ----------------------------------------------------- + +ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) +base_url = f"{os.environ.get('docs_url')}" +print(f"base_url: {base_url}, ROOT_DIR: {ROOT_DIR}") # use SDK source location from environment or try to guess SRC_PATH = os.path.dirname(os.path.abspath(__file__)) @@ -67,6 +74,7 @@ def parse_version(): 'sphinx_tabs.tabs', 'breathe', 'sphinx_rtd_size', + "sphinx.ext.graphviz" ] # Page width @@ -88,7 +96,6 @@ def parse_version(): # This pattern also affects html_static_path and html_extra_path. 
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store'] - # -- Options for HTML output ------------------------------------------------- # The theme to use for HTML and HTML Help pages. See the documentation for @@ -116,7 +123,9 @@ def parse_version(): 'github_repo': 'ouster_example', # 'github_version': 'ouster/python-bindings', 'github_version': 'master', - 'conf_py_path': '/docs/' + 'conf_py_path': '/docs/', + 'versions': [["latest", base_url]], + 'current_version': "latest" } # show Ouster logo in sidebar header @@ -131,8 +140,6 @@ def parse_version(): html_css_files = [ 'css/ouster_rtd_tweaks.css', ] - - # -- Extension configuration ------------------------------------------------- # use both class and constructor docstrings @@ -161,18 +168,15 @@ def parse_version(): todo_emit_warnings = True # copybutton configs -# Note: last entry treats four spaces as a prompt to support "continuation lines" -copybutton_prompt_text = r'>>> |\.\.\. |\$ |PS > |C:\\> |> | ' +copybutton_prompt_text = r'PS >' copybutton_prompt_is_regexp = True +copybutton_exclude = '.linenos, .gp' # tabs behavior sphinx_tabs_disable_tab_closing = True - # -- Doxygen XML generation handlers ----------------------------------- - def do_doxygen_generate_xml(app): - # Only runs is breathe projects exists if not app.config["breathe_projects"]: return @@ -190,7 +194,9 @@ def do_doxygen_generate_xml(app): dictionary = { 'project': app.config.project, 'version': app.config.release, - 'output_dir': doxygen_output_dir + 'output_dir': doxygen_output_dir, + 'warn_log_file': os.path.join( + doxygen_output_dir, "warning_log.log") } with open(os.path.join(app.confdir, 'Doxyfile'), 'r') as template_file: @@ -215,7 +221,14 @@ def do_doxygen_temp_cleanup(app, exception): def setup(app): - # Add a hook for generating doxygen xml and cleaning up app.connect("builder-inited", do_doxygen_generate_xml) app.connect("build-finished", do_doxygen_temp_cleanup) + +# read all versions from the JSON file +# This is displayed in the footer +# Duplicate versions.json file to allow building sdk docs independently +with open(f"versions.json", "r") as file: + versions = json.load(file) +for v in versions: + html_context['versions'].append([v["version"], base_url + '/' + v["version"]]) diff --git a/docs/cpp/api.rst b/docs/cpp/api.rst index c9888bec..81a41d6b 100644 --- a/docs/cpp/api.rst +++ b/docs/cpp/api.rst @@ -7,6 +7,7 @@ CPP API Documentation ouster_client ouster_pcap + ouster_osf .. todo:: Uncomment ``ouster_viz`` section below when we fix C++ PointViz doxygen comments in code diff --git a/docs/cpp/building.rst b/docs/cpp/building.rst index b4570988..6d1b9ab0 100644 --- a/docs/cpp/building.rst +++ b/docs/cpp/building.rst @@ -77,10 +77,10 @@ for dependencies. Follow the official documentation to set up your build environ `_ * `Visual Studio CPP Support `_ -* `Vcpkg, at tag "2023.02.24" installed and integrated with Visual Studio +* `Vcpkg, at tag "2023.10.19" installed and integrated with Visual Studio `_ -**Note** You'll need to run ``git checkout 2023.02.24`` in the vcpkg directory before bootstrapping +**Note** You'll need to run ``git checkout 2023.10.19`` in the vcpkg directory before bootstrapping to use the correct versions of the dependencies. Building may fail unexpectedly if you skip this step. diff --git a/docs/cpp/examples/index.rst b/docs/cpp/examples/index.rst new file mode 100644 index 00000000..1405be1f --- /dev/null +++ b/docs/cpp/examples/index.rst @@ -0,0 +1,9 @@ +============ +CPP Examples +============ + +.. 
toctree:: + :caption: CPP Examples + + Simple Examples + OSF Examples diff --git a/docs/cpp/examples.rst b/docs/cpp/examples/simple_examples.rst similarity index 83% rename from docs/cpp/examples.rst rename to docs/cpp/examples/simple_examples.rst index 3d5a55a6..77fdc056 100644 --- a/docs/cpp/examples.rst +++ b/docs/cpp/examples/simple_examples.rst @@ -56,8 +56,8 @@ persistence which can be set as above. The automatic udp destination flag cannot Working with LidarScans ----------------------- -The :cpp:class:`ouster::LidarScan` is explained in depth conceptually in the `LidarScan reference -`_. Here we cover some specifics that will be useful for C++ developers. +The :cpp:class:`ouster::LidarScan` is explained in depth conceptually in the :ref:`LidarScan reference +`. Here we cover some specifics that will be useful for C++ developers. LidarScan constructors @@ -158,5 +158,34 @@ In the following code, ``transformation`` represents the extrinsincs transform: :start-after: [doc-stag-extrinsics-to-xyzlut] :end-before: [doc-etag-extrinsics-to-xyzlut] :dedent: + + +Reading Scans From An OSF File +++++++++++++++++++++++++++++++ + +The OSF file is a common format used to store Ouster sensor data. It can be useful to read +the file outside of the ``ouster-cli`` utility in order to perform more advanced processing. + +Below you can see an example which reads each scan in an OSF and prints them to stdout: + +.. literalinclude:: /../examples/osf_reader_example.cpp + :language: cpp + :start-after: [doc-stag-osf-read-cpp] + :end-before: [doc-etag-osf-read-cpp] + :dedent: +Writing Scans To An OSF File +++++++++++++++++++++++++++++ + +An API for writing to the OSF file format is also exposed. This is most often used for writing +scans and metadata, possibly with a reduced number of fields in order to save data. + +Below you can see an example which creates a scan and writes it to an OSF File using the Writer API: + +.. literalinclude:: /../examples/osf_writer_example.cpp + :language: cpp + :start-after: [doc-stag-osf-write-cpp] + :end-before: [doc-etag-osf-write-cpp] + :dedent: + diff --git a/docs/cpp/ouster_client/types.rst b/docs/cpp/ouster_client/types.rst index 85dfa195..b7d90cf9 100644 --- a/docs/cpp/ouster_client/types.rst +++ b/docs/cpp/ouster_client/types.rst @@ -157,7 +157,8 @@ Packet Format .. doxygenclass:: ouster::sensor::packet_format :members: -.. doxygenfunction:: ouster::sensor::get_format +.. doxygengroup:: OusterClientTypeGetFormat + :content-only: MISC ==== diff --git a/docs/cpp/ouster_osf/basics.rst b/docs/cpp/ouster_osf/basics.rst new file mode 100644 index 00000000..e1263647 --- /dev/null +++ b/docs/cpp/ouster_osf/basics.rst @@ -0,0 +1,36 @@ +======== +basics.h +======== + +General +------- +.. doxygentypedef:: ouster::osf::ts_t + +.. doxygenenum:: ouster::osf::OSF_VERSION + +.. doxygenvariable:: ouster::osf::FLATBUFFERS_PREFIX_LENGTH + +.. doxygenfunction:: ouster::osf::to_string(const HEADER_STATUS status) + +Buffer Operations +----------------- +.. doxygenfunction:: ouster::osf::get_prefixed_size + +.. doxygenfunction:: ouster::osf::get_block_size + +.. doxygenfunction:: ouster::osf::check_prefixed_size_block_crc + +.. doxygenfunction:: ouster::osf::to_string(const uint8_t* buf, const size_t count, const size_t max_show_count = 0) + +Batching +-------- +.. doxygengroup:: OsfBatchingFunctions + :content-only: + +ChunksLayout +------------ +.. doxygenfunction:: ouster::osf::to_string(ChunksLayout chunks_layout) + +.. doxygenfunction:: ouster::osf::chunks_layout_of_string + +.. 
doxygenenum:: ouster::osf::ChunksLayout diff --git a/docs/cpp/ouster_osf/crc32.rst b/docs/cpp/ouster_osf/crc32.rst new file mode 100644 index 00000000..ebbe1da2 --- /dev/null +++ b/docs/cpp/ouster_osf/crc32.rst @@ -0,0 +1,8 @@ +======= +crc32.h +======= + +.. doxygenvariable:: ouster::osf::CRC_BYTES_SIZE + +.. doxygengroup:: OsfCRCFunctions + :content-only: diff --git a/docs/cpp/ouster_osf/file.rst b/docs/cpp/ouster_osf/file.rst new file mode 100644 index 00000000..3bb4d704 --- /dev/null +++ b/docs/cpp/ouster_osf/file.rst @@ -0,0 +1,12 @@ +====== +file.h +====== + +.. doxygentypedef:: ouster::osf::ChunkBuffer + +.. doxygenenum:: ouster::osf::OpenMode + +.. doxygenenum:: ouster::osf::FileState + +.. doxygenclass:: ouster::osf::OsfFile + :members: diff --git a/docs/cpp/ouster_osf/index.rst b/docs/cpp/ouster_osf/index.rst new file mode 100644 index 00000000..86fb6fb5 --- /dev/null +++ b/docs/cpp/ouster_osf/index.rst @@ -0,0 +1,21 @@ +============== +Ouster OSF API +============== + +.. toctree:: + :caption: Ouster OSF API + + basics.h + crc32.h + file.h + layout_streaming.h + meta_extrinsics.h + meta_lidar_sensor.h + meta_streaming_info.h + metadata.h + operations.h + pcap_source.h + reader.h + stream_lidar_scan.h + writer.h + writerv2.h diff --git a/docs/cpp/ouster_osf/layout_streaming.rst b/docs/cpp/ouster_osf/layout_streaming.rst new file mode 100644 index 00000000..88f46444 --- /dev/null +++ b/docs/cpp/ouster_osf/layout_streaming.rst @@ -0,0 +1,9 @@ +================== +layout_streaming.h +================== + +.. doxygengroup:: OSFStreamingDefaultSize + :content-only: + +.. doxygenclass:: ouster::osf::StreamingLayoutCW + :members: diff --git a/docs/cpp/ouster_osf/meta_extrinsics.rst b/docs/cpp/ouster_osf/meta_extrinsics.rst new file mode 100644 index 00000000..b56ec410 --- /dev/null +++ b/docs/cpp/ouster_osf/meta_extrinsics.rst @@ -0,0 +1,10 @@ +================= +meta_extrinsics.h +================= + +.. doxygenclass:: ouster::osf::Extrinsics + :members: + +.. doxygengroup:: OSFTraitsExtrinsics + :members: + :content-only: diff --git a/docs/cpp/ouster_osf/meta_lidar_sensor.rst b/docs/cpp/ouster_osf/meta_lidar_sensor.rst new file mode 100644 index 00000000..8235690a --- /dev/null +++ b/docs/cpp/ouster_osf/meta_lidar_sensor.rst @@ -0,0 +1,10 @@ +=================== +meta_lidar_sensor.h +=================== + +.. doxygenclass:: ouster::osf::LidarSensor + :members: + +.. doxygengroup:: OSFTraitsLidarSensor + :members: + :content-only: diff --git a/docs/cpp/ouster_osf/meta_streaming_info.rst b/docs/cpp/ouster_osf/meta_streaming_info.rst new file mode 100644 index 00000000..e5148b33 --- /dev/null +++ b/docs/cpp/ouster_osf/meta_streaming_info.rst @@ -0,0 +1,27 @@ +===================== +meta_streaming_info.h +===================== + +ChunkInfo +--------- +.. doxygenstruct:: ouster::osf::ChunkInfo + :members: + +.. doxygenfunction:: ouster::osf::to_string(const ChunkInfo& chunk_info) + +StreamStats +----------- +.. doxygenstruct:: ouster::osf::StreamStats + :members: + +.. doxygenfunction:: ouster::osf::to_string(const StreamStats& stream_stats) + +Streaming Info +-------------- +.. doxygenclass:: ouster::osf::StreamingInfo + :members: + +.. doxygengroup:: OSFTraitsStreamingInfo + :members: + :content-only: + diff --git a/docs/cpp/ouster_osf/metadata.rst b/docs/cpp/ouster_osf/metadata.rst new file mode 100644 index 00000000..6310df2f --- /dev/null +++ b/docs/cpp/ouster_osf/metadata.rst @@ -0,0 +1,35 @@ +========== +metadata.h +========== + +Public API +========== + +.. 
doxygenclass:: ouster::osf::MetadataEntry + :members: + +.. doxygenclass:: ouster::osf::MetadataEntryRef + :members: + +.. doxygenclass:: ouster::osf::MetadataStore + :members: + +.. doxygenfunction:: ouster::osf::metadata_pointer_as + + +Internal API +============ +.. doxygenclass:: ouster::osf::MetadataEntryHelper + :members: + +.. doxygenstruct:: ouster::osf::RegisterMetadata + :members: + :allow-dot-graphs: + +.. doxygenstruct:: ouster::osf::MetadataTraits + :members: + +.. doxygenstruct:: ouster::osf::MessageStream + :members: + +.. doxygenfunction:: ouster::osf::metadata_type diff --git a/docs/cpp/ouster_osf/operations.rst b/docs/cpp/ouster_osf/operations.rst new file mode 100644 index 00000000..e42bb830 --- /dev/null +++ b/docs/cpp/ouster_osf/operations.rst @@ -0,0 +1,15 @@ +============ +operations.h +============ + +.. doxygenfunction:: ouster::osf::dump_metadata + +.. doxygenfunction:: ouster::osf::parse_and_print + +.. doxygenfunction:: ouster::osf::backup_osf_file_metablob + +.. doxygenfunction:: ouster::osf::restore_osf_file_metablob + +.. doxygenfunction:: ouster::osf::osf_file_modify_metadata + +.. doxygenfunction:: ouster::osf::pcap_to_osf diff --git a/docs/cpp/ouster_osf/pcap_source.rst b/docs/cpp/ouster_osf/pcap_source.rst new file mode 100644 index 00000000..f4ff67c3 --- /dev/null +++ b/docs/cpp/ouster_osf/pcap_source.rst @@ -0,0 +1,6 @@ +============= +pcap_source.h +============= + +.. doxygenclass:: ouster::osf::PcapRawSource + :members: diff --git a/docs/cpp/ouster_osf/reader.rst b/docs/cpp/ouster_osf/reader.rst new file mode 100644 index 00000000..88a93cf4 --- /dev/null +++ b/docs/cpp/ouster_osf/reader.rst @@ -0,0 +1,66 @@ +======== +reader.h +======== + +ChunkValidity +------------- +.. doxygenenum:: ouster::osf::ChunkValidity + +ChunkState +---------- +.. doxygenstruct:: ouster::osf::ChunkState + :members: + +.. doxygenfunction:: ouster::osf::to_string(const ChunkState& chunk_state) + +ChunkInfoNode +------------- +.. doxygenstruct:: ouster::osf::ChunkInfoNode + :members: + +.. doxygenfunction:: ouster::osf::to_string(const ChunkInfoNode& chunk_info) + +ChunksPile +---------- +.. doxygenclass:: ouster::osf::ChunksPile + :members: + +ChunksIter +---------- +.. doxygenstruct:: ouster::osf::ChunksIter + :members: + +ChunksRange +----------- +.. doxygenclass:: ouster::osf::ChunksRange + :members: + +Reader +------ +.. doxygenclass:: ouster::osf::Reader + :members: + +MessageRef +---------- +.. doxygenclass:: ouster::osf::MessageRef + :members: + +ChunkRef +-------- +.. doxygenclass:: ouster::osf::ChunkRef + :members: + +MessagesChunkIter +----------------- +.. doxygenstruct:: ouster::osf::MessagesChunkIter + :members: + +MessagesStreamingRange +---------------------- +.. doxygenclass:: ouster::osf::MessagesStreamingRange + :members: + +MessagesStreamingIter +--------------------- +.. doxygenstruct:: ouster::osf::MessagesStreamingIter + :members: diff --git a/docs/cpp/ouster_osf/stream_lidar_scan.rst b/docs/cpp/ouster_osf/stream_lidar_scan.rst new file mode 100644 index 00000000..78b261b2 --- /dev/null +++ b/docs/cpp/ouster_osf/stream_lidar_scan.rst @@ -0,0 +1,18 @@ +=================== +stream_lidar_scan.h +=================== + +.. doxygenstruct:: ouster::osf::zero_field + :members: + +.. doxygenclass:: ouster::osf::LidarScanStreamMeta + :members: + +.. doxygengroup:: OSFTraitsLidarScanStreamMeta + :members: + :content-only: + +.. doxygenclass:: ouster::osf::LidarScanStream + :members: + +.. 
doxygenfunction:: ouster::osf::slice_with_cast diff --git a/docs/cpp/ouster_osf/writer.rst b/docs/cpp/ouster_osf/writer.rst new file mode 100644 index 00000000..33290bf5 --- /dev/null +++ b/docs/cpp/ouster_osf/writer.rst @@ -0,0 +1,12 @@ +======== +writer.h +======== + +.. doxygenclass:: ouster::osf::ChunksWriter + :members: + +.. doxygenclass:: ouster::osf::Writer + :members: + +.. doxygenclass:: ouster::osf::ChunkBuilder + :members: diff --git a/docs/cpp/ouster_osf/writerv2.rst b/docs/cpp/ouster_osf/writerv2.rst new file mode 100644 index 00000000..6b4fdc48 --- /dev/null +++ b/docs/cpp/ouster_osf/writerv2.rst @@ -0,0 +1,7 @@ +========== +writerv2.h +========== + +.. doxygenclass:: ouster::osf::WriterV2 + :members: + diff --git a/docs/cpp/ouster_pcap/os_pcap.rst b/docs/cpp/ouster_pcap/os_pcap.rst index 43cbb2b6..0cac0fdb 100644 --- a/docs/cpp/ouster_pcap/os_pcap.rst +++ b/docs/cpp/ouster_pcap/os_pcap.rst @@ -33,8 +33,6 @@ Functions .. doxygenfunction:: ouster::sensor_utils::read_packet -.. doxygenfunction:: ouster::sensor_utils::record_initialize( const std::string& file, const std::string& src_ip, const std::string& dst_ip, int frag_size, bool use_sll_encapsulation = false) - .. doxygenfunction:: ouster::sensor_utils::record_initialize(const std::string& file, int frag_size, bool use_sll_encapsulation = false); .. doxygenfunction:: ouster::sensor_utils::record_uninitialize diff --git a/docs/index.rst b/docs/index.rst index b96b3094..a11d27a6 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -12,7 +12,9 @@ :hidden: Developer Quick Start + Using ScanSource API Examples + SLAM Quickstart Point Cloud Visualizer Developing @@ -21,7 +23,7 @@ :hidden: Build - Examples + Examples .. toctree:: :caption: ROS1 Guide @@ -33,6 +35,7 @@ :caption: SDK Reference :hidden: + OSF Lidar Scan API Python API Reference C++ API Reference @@ -54,6 +57,7 @@ Migrating from 20220927/0.5.1 to 20230114/0.7.1 Migrating from 20230114/0.7.1 to 20230403/0.8.1 + Migration from 20231031/0.10.0 to 20240423/0.11.0 .. FAQ diff --git a/docs/installation.rst b/docs/installation.rst index 82582f15..457f1150 100644 --- a/docs/installation.rst +++ b/docs/installation.rst @@ -29,9 +29,9 @@ Supported Platforms Installation -------------- -The Ouster Python SDK binary packages require Python >= 3.7 and pip >= 19.0 on most platforms. On +The Ouster Python SDK binary packages require Python >= 3.8 and pip >= 19.0 on most platforms. On Ubuntu 18.04, the default Python 3 version is is 3.6, so you'll have to install and use -``python3.7`` explicitly. On Apple M1, you'll need need Python >= 3.8. +``python3.8`` explicitly. On Apple M1, you'll need need Python >= 3.8. .. note:: @@ -56,17 +56,17 @@ the Ouster Python SDK, you will need to build from source. See the `build instru requirements needed to build from a source distribution or from a clone of the repository. To install on :ref:`supported platforms`, first make sure you have the latest -version of pip: +version of pip and setuptools: .. tabs:: .. code-tab:: console Linux/macOS - $ python3 -m pip install --upgrade pip + $ python3 -m pip install --upgrade pip setuptools .. 
code-tab:: powershell Windows x64 - PS > py -3 -m pip install --upgrade pip + PS > py -3 -m pip install --upgrade pip setuptools Now that your Python environment has an up-to-date pip, you can install ouster-sdk: diff --git a/docs/migration/migration-20231031-20240423.rst b/docs/migration/migration-20231031-20240423.rst new file mode 100644 index 00000000..41f4f926 --- /dev/null +++ b/docs/migration/migration-20231031-20240423.rst @@ -0,0 +1,71 @@ +================================================= +Migration from 20231031/0.10.0 to 20240423/0.11.0 +================================================= + +The 20240423 release, which corresponds to Python SDK 0.11.0, brings a few breaking changes. The +changes and how to mitigate them are summarized here. + +ouster.sdk module changes ++++++++++++++++++++++++++ +Starting with ouster-sdk v0.11.0, most of core python sdk objects have been moved from the ``ouster`` +namespace into the ``ouster.sdk`` namespace. For example, before ouster-sdk 0.11.0 users used to access +the ``client`` object using: + +.. code:: python + + >>> from ouster import client + + +This would still work but will generate a warning message asking the user to update the path to ``ouster.sdk``. +Thus the update module path would be: + +.. code:: python + + >>> from ouster.sdk import client + +This same change applies to ``ouster.pcap``, ``ouster.viz``, ``ouster.osf``. Each of these modules have been +relocated to ``ouster.sdk.pcap``, ``ouster.sdk.viz``, ``ouster.sdk.osf`` + + +Using the new ScanSource +++++++++++++++++++++++++ + +Starting with ouster-sdk v0.11.0, the sdk includes a unified API for accessing LidarScan(s) from the three +supported source types: live sensors, pcap files, and osf files. This completely replaces the previous objects +that were used to handle each source type. For example, the user used to write the following to manipulate pcap +files: + +.. code:: python + + from ouster import client + from ouster.pcap import Pcap + from ouster.sdk.util import resolve_metadata + metadata = open(resolve_metadata(source), "r").read() + info = client.SensorInfo(metadata) + pcap = Pcap(source, info) + scans = client.Scans(pcap, fields=fields) + +but to stream from a live sensor the user had to do the following: + +.. code:: python + + from ouster import client + + scans = client.Scans.stream(hostname=source) + + +This has been replaced with the unified ``ScanSource`` interface which can be easily constructed using a single method + +.. code:: python + + from ouster.sdk import open_source + scans = open_source(source_url) # where source_url can be a path to pcap or an osf file or a sensor url + + +Notes for the future +-------------------- + + +Dropped Support ++++++++++++++++ +Next iterations of the Ouster-SDK is going to require C++17 standard at minimum to compile diff --git a/docs/overview.rst b/docs/overview.rst index ce58f80f..f99df151 100644 --- a/docs/overview.rst +++ b/docs/overview.rst @@ -35,22 +35,23 @@ e.g., the SDK version 20210608 (ouster-sdk 0.2.0) is not compatible with FW 3.0. 
The following table indicates the compatibility of each released SDK version and its FW compatibility: -===================================== ======= ======= ======= ======= ======= ======= ======= ======= ======= -SDK Tag (Release) / Python SDK FW 1.13 FW 2.0 FW 2.1 FW 2.2 FW 2.3 FW 2.4 FW 2.5 FW 3.0 FW 3.1 -===================================== ======= ======= ======= ======= ======= ======= ======= ======= ======= -C++ SDK 20231031 / Python SDK 0.10.0 no **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** -C++ SDK 20230710 / Python SDK 0.9.0 no **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** -C++ SDK 20230403 / Python SDK 0.8.1 no **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** -C++ SDK 20230114 / Python SDK 0.7.1 no **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** -C++ SDK 20220927 / Python SDK 0.5.2 no **yes** **yes** **yes** **yes** **yes** no no no -C++ SDK 20220826 / Python SDK 0.5.1 no **yes** **yes** **yes** **yes** **yes** no no no -C++ SDK 20220608 / Python SDK 0.4.1 **yes** **yes** **yes** **yes** **yes** no no no no -C++ SDK 20220504 / Python SDK 0.4.0 **yes** **yes** **yes** **yes** **yes** no no no no -C++ SDK 20220107 / Python SDK 0.3.0 **yes** **yes** **yes** **yes** no no no no no -C++ SDK 20210608 / Python SDK 0.2.1 **yes** **yes** **yes** no no no no no no -C++ SDK 20201209 / n/a **yes** **yes** **yes** no no no no no no -C++ SDK v1.13.0 / n/a **yes** no no no no no no no no -===================================== ======= ======= ======= ======= ======= ======= ======= ======= ======= +===================================== ======= ======= ======= ======= ======= ======= ======= ======= +SDK Tag (Release) / Python SDK FW 2.0 FW 2.1 FW 2.2 FW 2.3 FW 2.4 FW 2.5 FW 3.0 FW 3.1 +===================================== ======= ======= ======= ======= ======= ======= ======= ======= +C++ SDK 20240423 / Python SDK 0.11.0 **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** +C++ SDK 20231031 / Python SDK 0.10.0 **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** +C++ SDK 20230710 / Python SDK 0.9.0 **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** +C++ SDK 20230403 / Python SDK 0.8.1 **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** +C++ SDK 20230114 / Python SDK 0.7.1 **yes** **yes** **yes** **yes** **yes** **yes** **yes** **yes** +C++ SDK 20220927 / Python SDK 0.5.2 **yes** **yes** **yes** **yes** **yes** no no no +C++ SDK 20220826 / Python SDK 0.5.1 **yes** **yes** **yes** **yes** **yes** no no no +C++ SDK 20220608 / Python SDK 0.4.1 **yes** **yes** **yes** **yes** no no no no +C++ SDK 20220504 / Python SDK 0.4.0 **yes** **yes** **yes** **yes** no no no no +C++ SDK 20220107 / Python SDK 0.3.0 **yes** **yes** **yes** no no no no no +C++ SDK 20210608 / Python SDK 0.2.1 **yes** **yes** no no no no no no +C++ SDK 20201209 / n/a **yes** **yes** no no no no no no +C++ SDK v1.13.0 / n/a no no no no no no no no +===================================== ======= ======= ======= ======= ======= ======= ======= ======= If you are a C++ SDK user who has upgraded to the latest FW but requires an older SDK version, please contact our customer support or the Field Applications Engineer who works with you. 
diff --git a/docs/python/api/index.rst b/docs/python/api/index.rst index f8e2dde2..2bc47884 100644 --- a/docs/python/api/index.rst +++ b/docs/python/api/index.rst @@ -8,4 +8,4 @@ API Reference pcap viz examples - + osf diff --git a/docs/python/api/osf.rst b/docs/python/api/osf.rst new file mode 100644 index 00000000..dbf9d26d --- /dev/null +++ b/docs/python/api/osf.rst @@ -0,0 +1,122 @@ +============================ +Module :mod:`ouster.sdk.osf` +============================ + +.. contents:: + :local: + :depth: 4 + +.. automodule:: ouster.sdk.osf + +---- + +Low-Level API +------------- + +Reading +^^^^^^^ + +``Reader`` for OSF files +~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: Reader + :members: + :undoc-members: + + +``MessageRef`` wrapper for a `message` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: MessageRef + :members: + :undoc-members: + +``ChunkRef`` wrapper for a `chunk` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: ChunkRef + :members: + :undoc-members: + :special-members: __len__ + +``MetadataStore`` for `metadata entries` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: MetadataStore + :members: + :undoc-members: + +``MetadataEntry`` base class for all metadatas +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: MetadataEntry + :members: + :undoc-members: + + +Writing OSF files +^^^^^^^^^^^^^^^^^ + +``Writer`` to create OSF file +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: Writer + :members: + :undoc-members: + +.. autoclass:: ChunksLayout + :members: + :undoc-members: + + +Common `metadata entries` +^^^^^^^^^^^^^^^^^^^^^^^^^ + +``LidarSensor`` Ouster sensor metadata (i.e. ``client.SensorInfo``) +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: LidarSensor + :members: + :undoc-members: + +``StreamStats`` statistics per stream +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: StreamStats + :members: + :undoc-members: + +``StreamingInfo`` stream statisitcs +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: StreamingInfo + :members: + :undoc-members: + + +Common `streams` +^^^^^^^^^^^^^^^^ + +``LidarScanStream`` stream +~~~~~~~~~~~~~~~~~~~~~~~~~~ + +.. autoclass:: LidarScanStream + :members: + :undoc-members: + + +High-Level API +-------------- + +``osf.Scans`` just read ``LidarScan`` objects from a file +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +.. autoclass:: Scans + :members: + :special-members: __iter__ + +``osf.WriterV2`` Higher level writer api. +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ +.. autoclass:: WriterV2 + :members: + :special-members: __enter__ __exit__ diff --git a/docs/python/devel.rst b/docs/python/devel.rst index 80ee4291..a431cbbf 100644 --- a/docs/python/devel.rst +++ b/docs/python/devel.rst @@ -21,7 +21,7 @@ Building the Python SDK from source requires several dependencies: - `libglfw3 `_ >= 3.2 - `libglew `_ >= 2.1 or `glad `_ - `spdlog `_ >= 1.9 -- `Python `_ >= 3.7 (with headers and development libraries) +- `python `_ >= 3.8 (with headers and development libraries) - `pybind11 `_ >= 2.0 The Python SDK source is available `on the Ouster Github `_. You should clone the whole project. @@ -39,7 +39,7 @@ On supported Debian-based linux systems, you can install all build dependencies libglfw3-dev libglew-dev libspdlog-dev \ libpng-dev libflatbuffers-dev -On macos >= 10.13, using homebrew, you should be able to run: +On macos >= 11, using homebrew, you should be able to run: .. 
code:: console @@ -52,8 +52,8 @@ After you have the system dependencies, you can build the SDK with: # first, specify the path to the ouster_example repository $ export OUSTER_SDK_PATH= - # make sure you have an up-to-date version of pip installed - $ python3 -m pip install --user --upgrade pip + # make sure you have an up-to-date version of pip and setuptools installed + $ python3 -m pip install --user --upgrade pip setuptools # install pybind11 $ python3 -m pip install pybind11 @@ -82,7 +82,7 @@ package manager and run: PS > vcpkg install --triplet=x64-windows curl eigen3 jsoncpp libtins glfw3 glad[gl-api-33] spdlog libpng flatbuffers -The currently tested vcpkg tag is ``2023.02.24``. After that, using a developer powershell prompt: +The currently tested vcpkg tag is ``2023.10.19``. After that, using a developer powershell prompt: .. code:: powershell diff --git a/docs/python/examples/index.rst b/docs/python/examples/index.rst index feeb8a42..91b7ed81 100644 --- a/docs/python/examples/index.rst +++ b/docs/python/examples/index.rst @@ -27,5 +27,6 @@ Similarly, ``$SENSOR_HOSTNAME`` is used for your sensor's hostname. record-stream visualizations conversion + osf-examples diff --git a/docs/python/examples/osf-examples.rst b/docs/python/examples/osf-examples.rst new file mode 100644 index 00000000..1f0087cf --- /dev/null +++ b/docs/python/examples/osf-examples.rst @@ -0,0 +1,160 @@ +=================== +OSF Python Examples +=================== + +.. contents:: + :local: + :depth: 3 + +Ouster Python API for OSF +^^^^^^^^^^^^^^^^^^^^^^^^^ + +Python OSF Reader/Writer API is a Python binding to the ``C++`` OSF Reader/Writer implementation +which means that all reading and writing operations works at native speeds. + + +All examples below assume that a user has an ``osf_file`` variable with a path to an OSF file and +``ouster.osf`` package is imported: + +.. code:: + + import ouster.osf as osf + + osf_file = 'path/to/osf_file.osf' + +You can use ``ouster-cli source .... save`` commands to generate a test OSF file to test any of the examples. + +Every example is wrapped into a CLI and available for quick tests by running +``python3 -m ouster.sdk.examples.osf ``: + +.. code:: bash + + $ python3 -m ouster.sdk.examples.osf --help + + usage: osf.py [-h] [--scan-num SCAN_NUM] OSF EXAMPLE + + Ouster Python SDK OSF examples. The EXAMPLE must be one of: + read-scans + read-messages + split-scans + slice-scans + get-lidar-streams + get-sensors-info + check-layout + +For example to execute the ``get-lidar-streams`` example you can run: + +.. code:: bash + + $ python3 -m ouster.sdk.examples.osf get-lidar-streams + + +Read Lidar Scans with ``osf.Scans`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``osf.Scans()`` interface is the simplest way to get all ``LidarScan`` objects for the first sensor +that was found in an OSF (majority of our test data uses only a single sensor recordings): + +.. literalinclude:: /../python/src/ouster/sdk/examples/osf.py + :start-after: [doc-stag-osf-read-scans] + :end-before: [doc-etag-osf-read-scans] + :dedent: + +Underneath it looks for available sensor streams, peeks first, creates the ``osf.Reader``, reads the +**messages** and decodes them to ``LidarScan`` objects. + +.. admonition:: Note about timestamp ``ts`` + + All messages in an OSF are stored with a timestamp so it's an essential part of the stream + during the read operation. 
If you later decide to store the post-processed ``LidarScan`` + back into another OSF, it's better to preserve the original ``ts``, which usually comes from + NIC/PCAP/BAG headers. To get ``ts`` along with the ``LidarScan``, use the ``osf.Scans().withTs()`` + iterator. + +Get Sensors Info with ``osf.Reader`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +``osf.Reader`` is the base ``Reader`` interface that gets info about ``start/end_ts``, reads and +decodes all **metadata entries**, and gives access to the **chunks** and **messages** of the OSF file. + +Sensor information is stored as an ``osf.LidarSensor`` metadata entry and can be read with the +``reader.meta_store.find()`` function, which returns all metadata entries of the specified type (in our +case ``osf.LidarSensor``): + +.. literalinclude:: /../python/src/ouster/sdk/examples/osf.py + :start-after: [doc-stag-osf-get-sensors-info] + :end-before: [doc-etag-osf-get-sensors-info] + :dedent: + + +Read All Messages with ``osf.Reader`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +With ``osf.Reader``, you can use the ``reader.messages()`` iterator to read messages in ``timestamp`` +order. + +.. literalinclude:: /../python/src/ouster/sdk/examples/osf.py + :start-after: [doc-stag-osf-read-all-messages] + :end-before: [doc-etag-osf-read-all-messages] + :dedent: + + +Checking Chunks Layout via ``osf.StreamingInfo`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Building on the example above, we can check stream +statistics information from ``osf.StreamingInfo``: + +.. literalinclude:: /../python/src/ouster/sdk/examples/osf.py + :start-after: [doc-stag-osf-check-layout] + :end-before: [doc-etag-osf-check-layout] + :dedent: + +For more information about the ``osf.StreamingInfo`` metadata entry, please refer to [RFC 0018]_. + + +Get Lidar Scan streams info via ``osf.LidarScanStream`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +Every message in an OSF belongs to a stream of a particular type (e.g. ``osf.LidarScanStream``, +``osf.LidarImuStream``, etc.). Stream information is stored as **metadata entries** within the +``osf.Reader.meta_store`` object and can be read and decoded in various ways. Below is an example +of how we can check the parameters of the available LidarScan streams (``osf.LidarScanStream``) by +checking the metadata entries: + +.. literalinclude:: /../python/src/ouster/sdk/examples/osf.py + :start-after: [doc-stag-osf-get-lidar-streams] + :end-before: [doc-etag-osf-get-lidar-streams] + :dedent: + + +Write Lidar Scan with sliced fields with ``osf.Writer`` +~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ + +We will use ``osf.Writer`` to re-encode an existing OSF file into Lidar +Scans with reduced fields. By reduced fields we mean that if a LidarScan has ``7`` channel +fields, we can keep only ``3`` of them and save disk space and bandwidth during replay. + +The general scheme for writing scans to an OSF with the Writer: + +0. Create ``osf.Writer`` with the output file name, lidar metadata(s) (``ouster.sdk.client.SensorInfo``) and optionally the desired output scan fields. +1. Use the writer's ``save`` function ``writer.save(index, scan)`` to encode the LidarScan ``scan`` into the + underlying message buffer for lidar ``index`` and finally push it to disk. If you have multiple lidars, you can + save the scans simultaneously by providing them in an array to ``writer.save``. + +.. 
+Split Lidar Scan stream into multiple files
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+
+Another example of using ``osf.Writer`` is splitting the Lidar Scan stream of one OSF file into
+two files.
+
+.. literalinclude:: /../python/src/ouster/sdk/examples/osf.py
+   :start-after: [doc-stag-osf-split-scans]
+   :end-before: [doc-etag-osf-split-scans]
+   :dedent:
diff --git a/docs/python/quickstart.rst b/docs/python/quickstart.rst
index 23e4d423..a52ecf1a 100644
--- a/docs/python/quickstart.rst
+++ b/docs/python/quickstart.rst
@@ -56,9 +56,20 @@ In your open python session, save the two paths to variables:

 Because our pcap file contains the UDP packet stream but not the sensor metadata, we load the
 sensor information from ``metadata_path`` first, using the client module:

+.. note::
+   Starting with ouster-sdk v0.11.0, most of the core SDK objects have been moved from the
+   ``ouster`` namespace into the ``ouster.sdk`` namespace.
+
+.. note::
+   Starting with ouster-sdk v0.11.0 we introduce a unified ``ScanSource`` interface that is easier
+   to use and more capable than the original objects demonstrated in this quickstart. Please refer
+   to :ref:`Using ScanSource API ` for details on how to use the new interface.
+
 .. code:: python

-   >>> from ouster import client
+   >>> from ouster.sdk import client
    >>> with open(metadata_path, 'r') as f:
    ...     info = client.SensorInfo(f.read())
@@ -68,7 +79,7 @@ captured UDP data by instantiating :py:class:`.pcap.Pcap`. This class acts as a

 .. code:: python

-   >>> from ouster import pcap
+   >>> from ouster.sdk import pcap
    >>> source = pcap.Pcap(pcap_path, info)

 To visualize data from this pcap file, proceed to :doc:`/python/examples/visualizations` examples.
@@ -123,7 +134,7 @@ Now configure the client:

 .. code:: python

-   >>> from ouster import client
+   >>> from ouster.sdk import client
    >>> config = client.SensorConfig()
    >>> config.udp_port_lidar = 7502
    >>> config.udp_port_imu = 7503
diff --git a/docs/python/slam-api-example.rst b/docs/python/slam-api-example.rst
new file mode 100644
index 00000000..708bed6d
--- /dev/null
+++ b/docs/python/slam-api-example.rst
@@ -0,0 +1,91 @@
+===============
+SLAM Quickstart
+===============
+
+.. contents::
+   :local:
+   :depth: 3
+
+.. _slam-api-example:
+
+This guide provides examples of using the SLAM API for development purposes.
+Users can run SLAM with an OS sensor's hostname or IP for real-time processing, or with a recorded
+PCAP/OSF file for offline processing.
+
+.. warning::
+   Due to a missing upstream dependency for Python 3.12, SLAM does not work on Python 3.12.
+
+
+Obtain Lidar Pose and Calculate Pose Difference
+===============================================
+The SLAM API outputs the sensor's pose for each lidar scan, which you can use to determine the
+sensor's orientation in your system. From the lidar poses we can calculate the pose difference
+between consecutive scans:
+
+.. code:: python
+
+   from ouster.sdk import open_source
+   from ouster.sdk import client
+   from ouster.mapping.slam import KissBackend
+   import numpy as np
+
+   # pcap_path points at your recorded pcap file (an OSF file or a sensor
+   # hostname works the same way with open_source)
+   scans = open_source(pcap_path, sensor_idx=0)
+   slam = KissBackend(scans.metadata, max_range=75, min_range=1, voxel_size=1.0)
+   last_scan_pose = np.eye(4)
+
+   for idx, scan in enumerate(scans):
+       scan_w_poses = slam.update(scan)
+       col = client.first_valid_column(scan_w_poses)
+       # scan_w_poses.pose is a list where each entry is the pose of that
+       # column's points; use the first valid column's pose as the scan pose
+       scan_pose = scan_w_poses.pose[col]
+       print(f"idx = {idx} and Scan Pose {scan_pose}")
+
+       # calculate the inverse transformation of the last scan pose
+       inverse_last = np.linalg.inv(last_scan_pose)
+       # calculate the pose difference by matrix multiplication
+       pose_diff = np.dot(inverse_last, scan_pose)
+       # extract rotation and translation
+       rotation_diff = pose_diff[:3, :3]
+       translation_diff = pose_diff[:3, 3]
+       print(f"idx = {idx} and Rotation Difference: {rotation_diff}, "
+             f"Translation Difference: {translation_diff}")
+
+       # remember this pose so the next iteration compares consecutive scans
+       last_scan_pose = scan_pose
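+If a single scalar summary is easier to monitor, the 4x4 pose difference above can be reduced to a
+rotation angle and a translation distance with plain NumPy (no additional SDK calls involved):
+
+.. code:: python
+
+   # angle of the relative rotation (radians), recovered from the matrix trace
+   angle = np.arccos(np.clip((np.trace(rotation_diff) - 1.0) / 2.0, -1.0, 1.0))
+   distance = np.linalg.norm(translation_diff)
+   print(f"rotated {angle:.4f} rad and moved {distance:.3f} m between scans")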
+SLAM with Visualizer and Accumulated Scans
+==========================================
+Visualizers and Accumulated Scans are also available for monitoring the performance of the
+algorithm, as well as for demonstration and feedback purposes.
+
+.. code:: python
+
+   from functools import partial
+   from ouster.sdk import open_source
+   from ouster.viz import SimpleViz, ScansAccumulator
+   from ouster.mapping.slam import KissBackend
+
+   scans = open_source(pcap_path, sensor_idx=0)
+   slam = KissBackend(scans.metadata, max_range=75, min_range=1, voxel_size=1.0)
+   info = scans.metadata  # SensorInfo used by the viz components below
+
+   scans_w_poses = map(partial(slam.update), scans)
+   scans_acc = ScansAccumulator(info,
+                                accum_max_num=10,
+                                accum_min_dist_num=1,
+                                map_enabled=True,
+                                map_select_ratio=0.01)
+
+   SimpleViz(info, scans_accum=scans_acc, rate=0.0).run(scans_w_poses)
+
+More details about the visualizer and accumulated scans can be found in the
+:ref:`Ouster Visualizer ` and :ref:`Scans Accumulator ` sections.
+
+
+.. note::
+
+   The performance of the SLAM algorithm depends on your CPU's processing power and the
+   ``voxel_size`` parameter. Below is a suggestion for selecting an appropriate voxel size:
+
+   | Outdoor: 1.4 - 2.2
+   | Large indoor: 1.0 - 1.8
+   | Small indoor: 0.4 - 0.8
+
+
+SLAM in Ouster-CLI
+==================
+We also offer a simpler way to run SLAM using the ``ouster-cli``. For additional details, please
+refer to :ref:`Ouster-CLI Mapping `.
diff --git a/docs/python/using-scan-source.rst b/docs/python/using-scan-source.rst
new file mode 100644
index 00000000..6c078308
--- /dev/null
+++ b/docs/python/using-scan-source.rst
@@ -0,0 +1,198 @@
+==================================
+Using the new ScanSource interface
+==================================
+
+.. contents::
+   :local:
+   :depth: 3
+
+.. _scan-source-example:
+
+In this example we are going to demonstrate the use of the new ScanSource API.
+
+
+Using the open_source method
+============================
+
+The new API introduces a method named ``open_source`` which lets users handle different source
+types through the same API. Currently supported source types are a live sensor, a pcap file, and
+an OSF file. For example, opening the same pcap file referenced in the main
+:ref:`Quick Start ` using the simplified API can be accomplished as follows:
+.. code:: python
+
+   >>> pcap_path = ''
+   >>> metadata_path = ''
+   >>> from ouster.sdk import open_source
+   >>> source = open_source(pcap_path, sensor_idx=0, meta=[metadata_path])
+
+
+The ``source`` handle here acts the same as the handle returned by the ``pcap.Pcap`` constructor,
+with some extra capabilities that we will cover later.
+
+Notice here that besides the ``pcap_path`` we pass two additional parameters: ``sensor_idx`` with a
+value of zero, and ``meta``, which we set to the ``metadata_path`` to point to the sensor metadata
+associated with the pcap file we are trying to open. Both parameters are optional and can be
+omitted. In case the ``meta`` parameter is omitted, the ``open_source`` method will attempt to
+locate the metadata associated with the pcap file based on its location and the pcap file prefix.
+That being said, if ``SAMPLE_DATA_JSON_PATH`` is located in the same folder as
+``SAMPLE_DATA_PCAP_PATH`` and the two files share a prefix, we can simplify the above call to:
+
+.. code:: python
+
+   >>> source = open_source(pcap_path, sensor_idx=0)
+
+
+The second parameter, ``sensor_idx``, allows users to select a specific sensor from the selected
+source. That is because, starting with ouster-sdk v0.11, Ouster added support for working with
+sensor data collected from multiple sensors. By default the ``open_source`` method returns the more
+complete ``MultiScanSource`` interface, which can interact with multiple sensor streams; we will
+cover it in the next section. Setting the value of ``sensor_idx`` to zero tells ``open_source``
+that we are only interested in LidarScan data coming from the first sensor of this specific pcap
+file, in case the file has more than one sensor. In that case, ``open_source`` returns the simpler
+``ScanSource`` interface, which is more familiar to SDK users from previous versions.
+
+The main difference between ``MultiScanSource`` and ``ScanSource`` is the expected return type of
+some of the object methods. For example, when creating an iterator for a ``ScanSource`` object,
+the user gets a single ``LidarScan`` object per iteration. Iterating over the contents of a
+``MultiScanSource`` object always yields a **list** of ``LidarScan(s)`` per iteration,
+corresponding to the number of sensors stored in the pcap file or whatever source type is being
+used. This is true even when the pcap file contains data for a single sensor.
+
+On the other hand, if the user wants to open an OSF file or access a live sensor, all that changes
+is the URL of the source. For example, to interact with a live sensor the user can execute the
+following snippet:
+
+.. code:: python
+
+   >>> sensor_url = ''
+   >>> from ouster.sdk import open_source
+   >>> source = open_source(sensor_url, sensor_idx=0)
+
+
+Obtaining sensor metadata
+=========================
+Every ScanSource holds a reference to the sensor metadata, which contains crucial information
+needed when processing the individual scans. Continuing the example, a user can access the
+metadata through the ``metadata`` property of a ``ScanSource`` object:
+
+.. code:: python
+
+   >>> print(source.metadata)
+
+
+Iterating over Scans
+====================
+
+Once we have successfully obtained a handle to the ScanSource, we can iterate over the
+``LidarScan`` objects stored in the pcap file and manipulate each one individually. For example,
+let's say we want to print the frame id of the first 10 scans. We can achieve that using:
+
+.. code:: python
+
+   >>> ctr = 0
+   >>> source_iter = iter(source)
+   >>> for scan in source_iter:
+   ...     print(scan.frame_id)
+   ...     ctr += 1
+   ...     if ctr == 10:
+   ...         break
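+The same thing can be expressed a bit more compactly with ``itertools.islice``, which is plain
+Python and requires no additional SDK functionality:
+
+.. code:: python
+
+   >>> from itertools import islice
+   >>> for scan in islice(source, 10):
+   ...     print(scan.frame_id)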
+As we noted earlier, if we don't supply ``sensor_idx=0`` to the ``open_source`` method, the method
+will construct a ``MultiScanSource``, which always addresses a group of sensors. Thus, when
+iterating over the ``source``, the user receives a collated set of scans from the addressed
+sensors per iteration. The ``MultiScanSource`` examines the timestamp of every scan from every
+sensor and returns a list of scans that fit within the same time window as a single batch. The
+size of the batch is fixed, corresponding to how many sensors are contained in the pcap or OSF
+file. However, the collation could yield a null value if one or more of the sensors didn't produce
+a ``LidarScan`` object that fits within the time frame of the current batch or iteration. Thus,
+depending on the operation at hand, it is critical to check that we got a valid ``LidarScan``
+object when examining the iteration output of a ``MultiScanSource``. To perform the same example
+as above when ``source`` is a handle to a ``MultiScanSource``, displaying the frame_id of the
+``LidarScan`` objects that belong to the same batch on the same line, the code needs to be updated
+to the following:
+
+.. code:: python
+
+   >>> ctr = 0
+   >>> source_iter = iter(source)
+   >>> for scans in source_iter:
+   ...     for scan in scans:   # source_iter here returns a list of scans
+   ...         if scan:         # check if the individual scan object is valid
+   ...             print(scan.frame_id, end=', ')
+   ...     print()              # new line for next batch
+   ...     ctr += 1
+   ...     if ctr == 10:
+   ...         break
+
+
+Note that when iterating over a ``MultiScanSource`` object, it always yields a list of scans, even
+when the underlying scan source has only a single sensor. In this case, the iterator will yield a
+list with a single element per iteration.
+
+
+
+Using indexing and slicing capabilities of a ScanSource
+========================================================
+
+One of the most prominent new features of the ScanSource API (besides being able to address
+multiple sensors) is the ability to use indexing and slicing when accessing the stored scans
+within the ``LidarScan`` source. Currently, this capability is only supported for non-live
+sources. That is to say, the functionality we are discussing can only be used when accessing a
+pcap or an OSF file. To enable this functionality we need to indicate that we want to manipulate
+the source as an indexed one upon opening. Revisiting the previous pcap open example, that would
+be achieved as follows:
+
+.. code:: python
+
+   >>> pcap_path = ''
+   >>> from ouster.sdk import open_source
+   >>> source = open_source(pcap_path, sensor_idx=0, index=True)
+
+First, note that we omitted the ``meta`` parameter since it can be populated automatically, as
+explained earlier. Second, you will notice that we introduced a new parameter, ``index``, with its
+value set to ``True`` (the default is ``False``). The same parameter can be applied when dealing
+with an OSF file, but not a live sensor.
+
+Depending on the file size and the underlying file format, there can be some delay before the file
+is fully indexed (OSF files take much less time to index than pcap files). A progress bar will
+appear to indicate the indexing progress.
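+For example, opening an OSF recording with indexing enabled might look like this (the path is a
+placeholder):
+
+.. code:: python
+
+   >>> osf_path = 'path/to/recording.osf'
+   >>> source = open_source(osf_path, sensor_idx=0, index=True)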
+
+Once the index is built, we can interact with the ``ScanSource`` object and access scans in the
+same manner as a Python list that holds references to ``LidarScan`` objects.
+
+For example, to access the 10th LidarScan and print its frame id, we can do the following:
+
+.. code:: python
+
+   >>> print(source[10].frame_id)
+
+Similarly, we can access the last LidarScan object and print its frame_id using:
+
+.. code:: python
+
+   >>> print(source[-1].frame_id)
+
+
+Alternatively, we can request a range of scans using the python slice operator. For example, to
+request the first 10 scans from a ScanSource and print their frame ids, we can do the following:
+
+.. code:: python
+
+   >>> for scan in source[0:10]:
+   ...     print(scan.frame_id)
+
+
+Note that we don't need to add any break here since the operation `source[0:10]` will only yield
+the first 10 ``LidarScan(s)``.
+
+To print the frame_id of the 10 scans preceding the last scan we do:
+
+.. code:: python
+
+   >>> for scan in source[-11:-1]:
+   ...     print(scan.frame_id)
+
+
+Finally, as you would expect from a typical slice operation, you can also use a negative step or a
+reversed iteration, as shown in the following examples:
+
+.. code:: python
+
+   >>> for scan in source[0:10:2]:  # prints the frame_id of every second scan of the first 10 scans
+   ...     print(scan.frame_id)
+
+   >>> for scan in source[10:0:-1]: # prints the frame_id of every scan of the first 10 scans in reverse
+   ...     print(scan.frame_id)
diff --git a/docs/python/viz/index.rst b/docs/python/viz/index.rst
index 5dafc5f8..63f12bb6 100644
--- a/docs/python/viz/index.rst
+++ b/docs/python/viz/index.rst
@@ -42,6 +42,5 @@ below:

 .. toctree::

-   Visualize SLAM Poses
    viz-api-tutorial
diff --git a/docs/python/viz/viz-scans-accum.rst b/docs/python/viz/viz-scans-accum.rst
index 82e05cc7..9509168e 100644
--- a/docs/python/viz/viz-scans-accum.rst
+++ b/docs/python/viz/viz-scans-accum.rst
@@ -23,12 +23,12 @@ Available view modes
 There are three view modes of **ScansAccumulator**, that may be enabled/disabled depending on
 it's params and the data that is passed throught it:

-   * **poses** (or **TRACK**), key ``8`` - all scan poses in a trajectory/path view (avaialble only
+   * **poses** (or **TRACK**), key ``8`` - all scan poses in a trajectory/path view (available only
     if poses data is present in scans)
    * **scan map** (or **MAP**), key ``7`` - overall map view with select ratio of random points
-     from every scan (avaialble for scans with/without poses)
+     from every scan (available for scans with/without poses)
    * **scan accum** (or **ACCUM**), key ``6`` - accumulated *N* scans (key frames) that is picked
-     according to params (avaialble for scans with/without poses)
+     according to params (available for scans with/without poses)


 Key bindings
@@ -191,6 +191,6 @@ graphing tool, but it can be ``matplotlib`` instead)::
     cloud_map.set_point_size(1)
     point_viz.add(cloud_map)

-In the example above one might use ``matplolib`` with some modifications to use pallette for peeking
+In the example above one might use ``matplotlib`` with some modifications to use palette for picking
 the key color.
diff --git a/docs/reference/lidar-scan.rst b/docs/reference/lidar-scan.rst
index d7e14da4..3fa38633 100644
--- a/docs/reference/lidar-scan.rst
+++ b/docs/reference/lidar-scan.rst
@@ -1,3 +1,5 @@
+..
_lidar-scan: + ============== Lidar Scan API ============== diff --git a/docs/reference/osf.rst b/docs/reference/osf.rst new file mode 100644 index 00000000..85aa8f57 --- /dev/null +++ b/docs/reference/osf.rst @@ -0,0 +1,60 @@ +.. _osf: + +================== +Open Sensor Format +================== + +The *Open Sensor Format* (OSF) is an extensible file format for storing +time-series data, based on FlatBuffers_. + +Compared to pcap, it offers the following advantages: + +- Messages can more easily be randomly-accessed because an index from message + timestamp to the file offset of the chunk containing the message is contained + within the file. +- Full frames of lidar data are meant to be compressed as individual channels + (using lossless PNG-based compression by default,) resulting in smaller file + sizes for the same data. +- Sensor configuration is contained within the file. + +The Ouster SDK provides both Python and C++ methods for working with OSF files. + +Reading and writing OSF files +----------------------------- + +Reading, writing, and recording new OSF files are all possible with the Ouster +SDK and the Ouster SDK CLI. + +* :doc:`Python API examples <../python/examples/osf-examples>` +* :doc:`C++ API examples <../cpp/examples/simple_examples>` +* :doc:`CLI examples <../cli/sample-sessions>` + +Getting example OSF files +------------------------- + +The :doc:`sample data page <../sample-data>` has instructions for obtaining sample datasets. Sample +datasets are availble in OSF format in addition to pcap. + +OSF format details +------------------ + +Typically, users of the Ouster SDK won't have to worry about the implementation +details of OSF. The following is a very basic overview of the structure of an +OSF file. + +An OSF file generally contains the following: + +#. A header, which contains the location of the OSF metadata. +#. A series of "chunks", each of which contain one or more messages, typically containing lidar scans - see :ref:`LidarScan reference `. + +#. Metadata, a collection of generic buffers usually containing the following: + + a. An index of chunks (meant to provide the file offset, in bytes, for a chunk given a timestamp.) + b. An ``ouster/v1/streaming/StreamingInfo``, which contains the start, end timestamps and number of messages for each chunk as well as some statistics. + c. An ``ouster/v1/os_sensor/LidarSensor``, which contains the configuration of the sensor used to collect the data contained in a stream of lidar scans. + d. An ``ouster/v1/os_sensor/LidarScanStream``, which indicates which fields are present in each lidar scan. + +For more details of the structure of an OSF file, consult the definition files +found in the ``ouster_osf/fb`` directory of the Ouster SDK repository. + +.. 
_FlatBuffers: https://flatbuffers.dev/ diff --git a/docs/versions.json b/docs/versions.json new file mode 100644 index 00000000..f87ea1d0 --- /dev/null +++ b/docs/versions.json @@ -0,0 +1,45 @@ +[ + { + "version": "0.10.0", + "tags": { + "sdkx": "sdk/0.10.0", + "sdk": "20231031" + } + }, + { + "version": "0.9.0", + "tags": { + "sdkx": "sdk/0.9.0", + "sdk": "sdk/0.9.0" + } + }, + { + "version": "0.8.1", + "tags": { + "sdkx": "sdk/20230403", + "sdk": "20230403" + } + }, + { + "version": "0.7.1", + "tags": { + "sdkx": "sdk/20230114", + "sdk": "20230114" + } + }, + { + "version": "0.5.1", + "tags": { + "sdkx": "sdk/20220826", + "sdk": "20220826" + } + }, + { + "version": "0.4.0", + "tags": { + "sdkx": "sdk/20220608", + "sdk": "20220608" + } + } +] + diff --git a/examples/CMakeLists.txt b/examples/CMakeLists.txt index 84fad334..925019bb 100644 --- a/examples/CMakeLists.txt +++ b/examples/CMakeLists.txt @@ -1,6 +1,9 @@ add_executable(client_example client_example.cpp) target_link_libraries(client_example PRIVATE OusterSDK::ouster_client) +add_executable(async_client_example async_client_example.cpp) +target_link_libraries(async_client_example PRIVATE OusterSDK::ouster_client) + add_executable(mtp_client_example mtp_client_example.cpp) target_link_libraries(mtp_client_example PRIVATE OusterSDK::ouster_client) @@ -20,6 +23,9 @@ endif() if(TARGET OusterSDK::ouster_osf) add_executable(osf_reader_example osf_reader_example.cpp) target_link_libraries(osf_reader_example PRIVATE OusterSDK::ouster_osf) + + add_executable(osf_writer_example osf_writer_example.cpp) + target_link_libraries(osf_writer_example PRIVATE OusterSDK::ouster_osf) else() message(STATUS "No ouster_osf library available; skipping examples") endif() diff --git a/examples/async_client_example.cpp b/examples/async_client_example.cpp new file mode 100644 index 00000000..3ca55054 --- /dev/null +++ b/examples/async_client_example.cpp @@ -0,0 +1,250 @@ +/** + * Copyright (c) 2023, Ouster, Inc. + * All rights reserved. + */ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "ouster/client.h" +#include "ouster/impl/build.h" +#include "ouster/lidar_scan.h" +#include "ouster/types.h" + +using namespace ouster; + +const size_t N_SCANS = 5; +const size_t UDP_BUF_SIZE = 65536; + +void FATAL(const char* msg) { + std::cerr << msg << std::endl; + std::exit(EXIT_FAILURE); +} + +/* + * Display some stats about the captured Lidar Scan + */ +void display_scan_summary(const LidarScan& scan); + +/* + * Write output to CSV files. The output can be viewed in a point cloud + * viewer like CloudCompare: + * + * [0] https://github.com/cloudcompare/cloudcompare + */ +void write_cloud(const std::string& file_path, const LidarScan::Points& cloud); + +int main(int argc, char* argv[]) { + if (argc != 2 && argc != 3) { + std::cerr << "Version: " << ouster::SDK_VERSION_FULL << " (" + << ouster::BUILD_SYSTEM << ")" + << "\n\nUsage: async_client_example " + "[]" + "\n\n is optional: leave blank for " + "automatic destination detection" + << std::endl; + + return argc == 1 ? EXIT_SUCCESS : EXIT_FAILURE; + } + + // Limit ouster_client log statements to "info" and direct the output to log + // file rather than the console (default). + sensor::init_logger("info", "ouster.log"); + + std::cerr << "Ouster client example " << ouster::SDK_VERSION << std::endl; + /* + * The sensor client consists of the network client and a library for + * reading and working with data. 
+ * + * The network client supports reading and writing a limited number of + * configuration parameters and receiving data without working directly with + * the socket APIs. See the `client.h` for more details. The minimum + * required parameters are the sensor hostname/ip and the data destination + * hostname/ip. + */ + const std::string sensor_hostname = argv[1]; + const std::string data_destination = (argc == 3) ? argv[2] : ""; + + std::cerr << "Connecting to \"" << sensor_hostname << "\"...\n"; + + auto handle = sensor::init_client(sensor_hostname, data_destination); + if (!handle) FATAL("Failed to connect"); + std::cerr << "Connection to sensor succeeded" << std::endl; + + /* + * Configuration and calibration parameters can be queried directly from the + * sensor. These are required for parsing the packet stream and calculating + * accurate point clouds. + */ + std::cerr << "Gathering metadata..." << std::endl; + auto metadata = sensor::get_metadata(*handle); + + // Raw metadata can be parsed into a `sensor_info` struct + sensor::sensor_info info = sensor::parse_metadata(metadata); + + size_t w = info.format.columns_per_frame; + size_t h = info.format.pixels_per_column; + + ouster::sensor::ColumnWindow column_window = info.format.column_window; + + std::cerr << " Firmware version: " << info.fw_rev + << "\n Serial number: " << info.sn + << "\n Product line: " << info.prod_line + << "\n Scan dimensions: " << w << " x " << h + << "\n Column window: [" << column_window.first << ", " + << column_window.second << "]" << std::endl; + + // A LidarScan holds lidar data for an entire rotation of the device + LidarScan scan{w, h, info.format.udp_profile_lidar}; + + // pre-compute a table for efficiently calculating point clouds from + // range + XYZLut lut = ouster::make_xyz_lut(info); + // A an array of points to hold the projected representation of the scan + LidarScan::Points cloud; + + // A ScanBatcher can be used to batch packets into scans + sensor::packet_format pf = sensor::get_format(info); + ScanBatcher batch_to_scan(info.format.columns_per_frame, pf); + + /* + * The network client provides some convenience wrappers around socket APIs + * to facilitate reading lidar and IMU data from the network. It is also + * possible to configure the sensor offline and read data directly from a + * UDP socket. + */ + + // buffer to store raw packet data + auto packet_buf = std::make_unique(UDP_BUF_SIZE); + + /* + In this example we spin two threads one to receive lidar packets while the + other thread accumlates lidar packets of the same frame into a LidarScan + object, computes the xyz coordinates and then writes these coordiantes into + a file. The example is a show case of utilizing threads to decouple + reception of packets from processing the point cloud. For a more complete + examples on how to efficient stream and process lidar packets please refer + to the async_udp_source_example.cpp or the ouster_ros driver implementation + */ + size_t n_scans = 0; // counter to track the number of complete scans that + // we have successfully captured and processed. 
+ std::mutex mtx; + std::condition_variable receiving_cv; + std::condition_variable processing_cv; + bool packet_processed = true; + + std::thread packet_receiving_thread([&]() { + while (n_scans < N_SCANS) { + // wait until sensor data is available + sensor::client_state st = sensor::poll_client(*handle); + + // check for timeout + if (st == sensor::TIMEOUT) FATAL("Client has timed out"); + + if (st & sensor::EXIT) FATAL("Exit was requested"); + + // check for error status + if (st & sensor::CLIENT_ERROR) + FATAL("Sensor client returned error state!"); + + // check for lidar data, read a packet and add it to the current + // batch + if (st & sensor::LIDAR_DATA) { + std::unique_lock lock(mtx); + receiving_cv.wait( + lock, [&packet_processed] { return packet_processed; }); + if (!sensor::read_lidar_packet(*handle, packet_buf.get(), pf)) { + FATAL("Failed to read a packet of the expected size!"); + } + packet_processed = false; + processing_cv.notify_one(); + } + + // check if IMU data is available (but don't do anything with it) + if (st & sensor::IMU_DATA) { + std::unique_lock lock(mtx); + receiving_cv.wait( + lock, [&packet_processed] { return packet_processed; }); + sensor::read_imu_packet(*handle, packet_buf.get(), pf); + // we are not going to processor imu data + // so we will keep packet_processed set to true + } + } + }); + + std::thread packet_processing_thread([&]() { + while (n_scans < N_SCANS) { + std::unique_lock lock(mtx); + processing_cv.wait( + lock, [&packet_processed] { return !packet_processed; }); + // batcher will return "true" when the current scan is complete + if (batch_to_scan(packet_buf.get(), scan)) { + // retry until we receive a full set of valid measurements + // (accounting for azimuth_window settings if any) + if (scan.complete(info.format.column_window)) { + display_scan_summary(scan); + std::cerr << "Computing point cloud... " << std::endl; + cloud = ouster::cartesian(scan, lut); + std::string file_name = + "cloud_" + std::to_string(n_scans) + ".csv"; + write_cloud(file_name, cloud); + ++n_scans; + } + } + + packet_processed = true; + receiving_cv.notify_one(); + } + }); + + packet_receiving_thread.join(); + packet_processing_thread.join(); + + std::cerr << "done" << std::endl; + + return EXIT_SUCCESS; +} + +void display_scan_summary(const LidarScan& scan) { + // channel fields can be queried as well + auto n_valid_first_returns = (scan.field(sensor::RANGE) != 0).count(); + + // LidarScan also provides access to header information such as + // status and timestamp + auto status = scan.status(); + auto it = std::find_if(status.data(), status.data() + status.size(), + [](const uint32_t s) { + return (s & 0x01); + }); // find first valid status + if (it != status.data() + status.size()) { + auto ts_ms = std::chrono::duration_cast( + std::chrono::nanoseconds(scan.timestamp()( + it - status.data()))); // get corresponding timestamp + + std::cerr << " Frame no. 
" << scan.frame_id << " with " + << n_valid_first_returns << " valid first returns at " + << ts_ms.count() << " ms" << std::endl; + } +} + +void write_cloud(const std::string& file_path, const LidarScan::Points& cloud) { + std::ofstream out; + out.open(file_path); + out << std::fixed << std::setprecision(4); + + // write each point, filtering out points without returns + for (int i = 0; i < cloud.rows(); i++) { + auto xyz = cloud.row(i); + if (!xyz.isApproxToConstant(0.0)) + out << xyz(0) << ", " << xyz(1) << ", " << xyz(2) << std::endl; + } + + out.close(); + std::cerr << " Wrote " << file_path << std::endl; +} \ No newline at end of file diff --git a/examples/client_example.cpp b/examples/client_example.cpp index dbf42864..65284602 100644 --- a/examples/client_example.cpp +++ b/examples/client_example.cpp @@ -75,7 +75,10 @@ int main(int argc, char* argv[]) { ouster::sensor::ColumnWindow column_window = info.format.column_window; - std::cerr << " Firmware version: " << info.fw_rev + // The dedicated firmware_version_from_metadata API works across firmwares + auto fw_ver = sensor::firmware_version_from_metadata(metadata); + + std::cerr << " Firmware version: " << to_string(fw_ver) << "\n Serial number: " << info.sn << "\n Product line: " << info.prod_line << "\n Scan dimensions: " << w << " x " << h @@ -99,8 +102,8 @@ int main(int argc, char* argv[]) { std::cerr << "Capturing points... "; // buffer to store raw packet data - auto lidar_packet = sensor::LidarPacket(); - auto imu_packet = sensor::ImuPacket(); + auto lidar_packet = sensor::LidarPacket(pf.lidar_packet_size); + auto imu_packet = sensor::ImuPacket(pf.imu_packet_size); for (size_t i = 0; i < N_SCANS;) { // wait until sensor data is available @@ -112,7 +115,7 @@ int main(int argc, char* argv[]) { // check for lidar data, read a packet and add it to the current batch if (st & sensor::LIDAR_DATA) { - if (!sensor::read_lidar_packet(*handle, lidar_packet, pf)) { + if (!sensor::read_lidar_packet(*handle, lidar_packet)) { FATAL("Failed to read a packet of the expected size!"); } @@ -126,7 +129,7 @@ int main(int argc, char* argv[]) { // check if IMU data is available (but don't do anything with it) if (st & sensor::IMU_DATA) { - sensor::read_imu_packet(*handle, imu_packet, pf); + sensor::read_imu_packet(*handle, imu_packet); } } std::cerr << "ok" << std::endl; diff --git a/examples/osf_reader_example.cpp b/examples/osf_reader_example.cpp index b575e220..934137ac 100644 --- a/examples/osf_reader_example.cpp +++ b/examples/osf_reader_example.cpp @@ -7,6 +7,7 @@ * explanations. */ +//! [doc-stag-osf-read-cpp] #include #include "ouster/impl/build.h" @@ -46,4 +47,5 @@ int main(int argc, char* argv[]) { } } } -} \ No newline at end of file +} +//! [doc-etag-osf-read-cpp] diff --git a/examples/osf_writer_example.cpp b/examples/osf_writer_example.cpp new file mode 100644 index 00000000..d5f62c50 --- /dev/null +++ b/examples/osf_writer_example.cpp @@ -0,0 +1,40 @@ +/** + * Copyright (c) 2024, Ouster, Inc. + * All rights reserved. + * + * This file contains example code for working with the osf::Writer class of + * the C++ Ouster SDK. Please see the sdk docs at static.ouster.dev for clearer + * explanations. + */ + +//! 
[doc-stag-osf-write-cpp] +#include + +#include "ouster/impl/build.h" +#include "ouster/osf/writer.h" + +using namespace ouster; + +int main(int argc, char* argv[]) { + if (argc != 2) { + std::cerr << "Version: " << ouster::SDK_VERSION_FULL << " (" + << ouster::BUILD_SYSTEM << ")" + << "\n\nUsage: osf_writer_example " << std::endl; + + return (argc == 1) ? EXIT_SUCCESS : EXIT_FAILURE; + } + + const std::string osf_file = argv[1]; + + // Start writing a 1 stream OSF file with a default initialized sensor info + osf::Writer writer( + osf_file, sensor::default_sensor_info(ouster::sensor::MODE_512x10)); + + // Instantiate a lidar scan with the expected width and height + // default_sensor_info assumes a 64 plane sensor + LidarScan scan(512, 64); + // Manipulate the scan as desired here + // Write it to file on stream 0 + writer.save(0, scan); +} +//! [doc-2tag-osf-write-cpp] diff --git a/ouster_client/CMakeLists.txt b/ouster_client/CMakeLists.txt index b0a05260..6f21a128 100644 --- a/ouster_client/CMakeLists.txt +++ b/ouster_client/CMakeLists.txt @@ -3,10 +3,11 @@ find_package(Eigen3 REQUIRED) find_package(jsoncpp REQUIRED) find_package(CURL REQUIRED) find_package(spdlog REQUIRED) +include(Coverage) # ==== Libraries ==== add_library(ouster_client src/client.cpp src/types.cpp src/sensor_info.cpp src/netcompat.cpp src/lidar_scan.cpp - src/image_processing.cpp src/buffered_udp_source.cpp src/parsing.cpp + src/image_processing.cpp src/udp_packet_source.cpp src/parsing.cpp src/sensor_http.cpp src/sensor_http_imp.cpp src/sensor_tcp_imp.cpp src/logging.cpp src/profile_extension.cpp src/util.cpp) @@ -19,6 +20,8 @@ target_link_libraries(ouster_client jsoncpp_lib spdlog::spdlog) target_compile_definitions(ouster_client PRIVATE EIGEN_MPL2_ONLY) +CodeCoverageFunctionality(ouster_client) + add_library(OusterSDK::ouster_client ALIAS ouster_client) # If ouster_client is built as >=c++17, the nonstd::optional backport diff --git a/ouster_client/include/ouster/buffered_udp_source.h b/ouster_client/include/ouster/buffered_udp_source.h deleted file mode 100644 index dd88315b..00000000 --- a/ouster_client/include/ouster/buffered_udp_source.h +++ /dev/null @@ -1,190 +0,0 @@ -/** - * Copyright (c) 2021, Ouster, Inc. - * All rights reserved. - * - * @file - * @brief Wrapper around sensor::client to provide buffering - * - * *Not* a public API. Currently part of the Python bindings implementation. - * - * Maintains a single-producer / single-consumer circular buffer that can be - * populated by a thread without holding the GIL to deal the relatively small - * default OS buffer size and high sensor UDP data rate. Must be thread-safe to - * allow reading data without holding the GIL while other references to the - * client exist. 
- */ - -#pragma once - -#include -#include -#include -#include -#include -#include - -#include "ouster/client.h" -#include "ouster/types.h" - -namespace ouster { -namespace sensor { -namespace impl { - -// 64 big enough for any UDP packet -constexpr size_t packet_size = 65536; - -class BufferedUDPSource { - // client handle - std::mutex cli_mtx_; - std::shared_ptr cli_; - uint32_t lidar_port_; - uint32_t imu_port_; - - // protect read/write_ind_ and stop_ - std::mutex cv_mtx_; - std::condition_variable cv_; - size_t read_ind_{0}, write_ind_{0}; - - // flag for other threads to signal producer to shut down - bool stop_{false}; - - // internal packet buffer - size_t capacity_{0}; - using entry = std::pair; - std::vector bufs_; - - explicit BufferedUDPSource(size_t buf_size); - - public: - /* Extra bit flag compatible with client_state to signal buffer overflow. */ - static constexpr int CLIENT_OVERFLOW = 0x10; - - /** - * Listen for sensor data on the specified ports; do not configure the - * sensor. - * - * @param[in] hostname hostname or IP of the sensor. - * @param[in] lidar_port port on which the sensor will send lidar data. - * @param[in] imu_port port on which the sensor will send imu data. - * @param[in] buf_size size of internal buffer, in no. packets. - */ - BufferedUDPSource(const std::string& hostname, int lidar_port, int imu_port, - size_t buf_size); - - /** - * Connect to and configure the sensor and start listening for data. - * - * Will be removed. - */ - [[deprecated]] BufferedUDPSource(const std::string& hostname, - const std::string& udp_dest_host, - lidar_mode mode, timestamp_mode ts_mode, - int lidar_port, int imu_port, - int timeout_sec, size_t buf_size); - - /** - * Fetch metadata from the sensor. - * - * @param[in] timeout_sec maximum time to wait until sensor is initialized. - * @param[in] legacy_format whether to use legacy format for metadata. - * @return a json string of the sensor metadata. - */ - std::string get_metadata(int timeout_sec = 60, bool legacy_format = true); - - /** - * Signal the producer to exit. - * - * Subsequent calls to consume() will return - * CLIENT_EXIT instead of blocking. Multiple calls to shutdown() are not an - * error. - */ - void shutdown(); - - /** - * Drop up to the specified number of packets from internal buffers. - * - * Drop all internally buffered data when n_packets = 0. Should only be - * called by the consumer thread. - * - * @param n_packets number of packets to drop. - */ - void flush(size_t n_packets); - - /** - * Get current buffer size. - * - * @return number of packets currently buffered. - */ - size_t size(); - - /** - * Get the maximum buffer size. - * - * @return maximum number of packets that can be buffered. - */ - size_t capacity(); - - /** - * Read next available packet in the buffer. - * - * Blocks if the queue is empty for up to `timeout_sec` (zero means wait - * forever). Should only be called by the consumer thread. If reading from - * the network was blocked because the buffer was full, the the - * CLIENT_OVERFLOW flag will be set on the next returned status. - * - * @param[in] buf the buffer to read into. - * @param[in] buf_sz maximum number of bytes to read into the buffer. - * @param[in] timeout_sec maximum time to wait for data. - * @return client status, see sensor::poll_client(). - */ - [[deprecated]] client_state consume(uint8_t* buf, size_t buf_sz, - float timeout_sec); - - /** - * Read next available packet in the buffer. 
- * - * If client_state returns LIDAR_DATA, submitted lidar packet will be - * populated, similarly if client_state returns IMU_DATA, submitted - * imu packet will be populated instead. - * - * Blocks if the queue is empty for up to `timeout_sec` (zero means wait - * forever). Should only be called by the consumer thread. If reading from - * the network was blocked because the buffer was full, the the - * CLIENT_OVERFLOW flag will be set on the next returned status. - * - * @param[in] lidarp lidar packet to read into - * @param[in] imu imu packet to read into - * @param[in] timeout_sec maximum time to wait for data. - * @return client status, see sensor::poll_client(). - */ - client_state consume(LidarPacket& lidarp, ImuPacket& imup, - float timeout_sec); - - /** - * Write data from the network into the circular buffer. - * - * Returns when shutdown() is signaled by the reader. Should be called from - * a separate thread from the consumer. - * - * @param[in] pf the packet format associated with the UDP stream. - */ - void produce(const ouster::sensor::packet_format& pf); - - /** - * Return the port used to listen for lidar UDP data. - * - * @return the lidar UDP port or 0 if shut down. - */ - int get_lidar_port(); - - /** - * Return the port used to listen for imu UDP data. - * - * @return the lidar UDP port or 0 if shut down. - */ - int get_imu_port(); -}; - -} // namespace impl -} // namespace sensor -} // namespace ouster diff --git a/ouster_client/include/ouster/client.h b/ouster_client/include/ouster/client.h index 99b65760..d4c61cb4 100644 --- a/ouster_client/include/ouster/client.h +++ b/ouster_client/include/ouster/client.h @@ -90,6 +90,7 @@ std::shared_ptr init_client(const std::string& hostname, int lidar_port, * @param[in] imu_port port on which the sensor will send imu data. When * using zero the method will automatically acquire and assign any free port. * @param[in] timeout_sec how long to wait for the sensor to initialize. + * @param[in] persist_config if true, persists sensor settings between restarts * * @return pointer owning the resources associated with the connection. */ @@ -97,7 +98,8 @@ std::shared_ptr init_client( const std::string& hostname, const std::string& udp_dest_host, lidar_mode ld_mode = MODE_UNSPEC, timestamp_mode ts_mode = TIME_FROM_UNSPEC, int lidar_port = 0, int imu_port = 0, - int timeout_sec = DEFAULT_HTTP_REQUEST_TIMEOUT_SECONDS); + int timeout_sec = DEFAULT_HTTP_REQUEST_TIMEOUT_SECONDS, + bool persist_config = false); /** * [BETA] Connect to and configure the sensor and start listening for data via @@ -110,6 +112,7 @@ std::shared_ptr init_client( * @param[in] main a flag that indicates this is the main connection to the * sensor in an multicast setup. * @param[in] timeout_sec how long to wait for the sensor to initialize. + * @param[in] persist_config if true, persists sensor settings between restarts * * @return pointer owning the resources associated with the connection. * @@ -120,7 +123,8 @@ std::shared_ptr init_client( std::shared_ptr mtp_init_client( const std::string& hostname, const sensor_config& config, const std::string& mtp_dest_host, bool main, - int timeout_sec = DEFAULT_HTTP_REQUEST_TIMEOUT_SECONDS); + int timeout_sec = DEFAULT_HTTP_REQUEST_TIMEOUT_SECONDS, + bool persist_config = false); /** @}*/ @@ -128,7 +132,7 @@ std::shared_ptr mtp_init_client( * Block for up to timeout_sec until either data is ready or an error occurs. 
* * NOTE: will return immediately if LIDAR_DATA or IMU_DATA are set and not - * cleared by read_lidar_data() and read_imu_data() before the next call. + * cleared by read_lidar_packet() and read_imu_packet() before the next call. * * @param[in] cli client returned by init_client associated with the connection. * @param[in] timeout_sec seconds to block while waiting for data. @@ -156,14 +160,37 @@ bool read_lidar_packet(const client& cli, uint8_t* buf, * Read lidar data from the sensor. Will not block. * * @param[in] cli client returned by init_client associated with the connection. - * @param[out] packet A LidarPacket to store lidar data read from a sensor. In - * addition, the LidarPacket's host_timestamp attribute is also set. - * @param[in] pf The packet format. + * @param[out] buf buffer to which to write lidar data. Must be at least + * `bytes + 1` bytes. + * @param[in] bytes expected number of bytes in the packet * * @return true if a lidar packet was successfully read. */ -bool read_lidar_packet(const client& cli, LidarPacket& packet, - const packet_format& pf); +bool read_lidar_packet(const client& cli, uint8_t* buf, size_t bytes); + +/** + * Read lidar data from the sensor. Will not block. + * + * @param[in] cli client returned by init_client associated with the connection. + * @param[out] packet A LidarPacket to store lidar data read from a sensor. + * Expects the packet to have *correct* number of bytes allocated for the + * packet. In addition, the LidarPacket's host_timestamp attribute is also set. + * + * @return true if a lidar packet was successfully read. + */ +bool read_lidar_packet(const client& cli, LidarPacket& packet); + +/** + * Read imu data from the sensor. Will not block. + * + * @param[in] cli client returned by init_client associated with the connection. + * @param[out] buf buffer to which to write lidar data. Must be at least + * `bytes + 1` bytes. + * @param[in] bytes expected number of bytes in the packet + * + * @return true if a lidar packet was successfully read. + */ +bool read_imu_packet(const client& cli, uint8_t* buf, size_t bytes); /** * Read imu data from the sensor. Will not block. @@ -181,15 +208,13 @@ bool read_imu_packet(const client& cli, uint8_t* buf, const packet_format& pf); * Read imu data from the sensor. Will not block. * * @param[in] cli client returned by init_client associated with the connection. - * @param[out] packet An ImuPacket to store imu data read from a sensor. In + * @param[out] packet An ImuPacket to store imu data read from a sensor. Expects + * the packet to have *correct* number of bytes allocated for the packet. In * addition, the ImuPacket's host_timestamp attribute is also set. - * imu_packet_bytes + 1 bytes. - * @param[in] pf The packet format. * * @return true if an imu packet was successfully read. */ -bool read_imu_packet(const client& cli, ImuPacket& packet, - const packet_format& pf); +bool read_imu_packet(const client& cli, ImuPacket& packet); /** * Get metadata text blob from the sensor. @@ -220,6 +245,8 @@ std::string get_metadata(client& cli, * @param[in] hostname sensor hostname. * @param[out] config sensor config to populate. * @param[in] active whether to pull active or passive configs. + * @param[in] timeout_sec set the timeout for the request, + * this argument is optional. * * @return true if sensor config successfully populated. */ @@ -249,6 +276,7 @@ enum config_flags : uint8_t { * @param[in] hostname sensor hostname. * @param[in] config sensor config. * @param[in] config_flags flags to pass in. 
+ * @param[in] timeout_sec timeout in seconds for http requests * * @return true if config params successfuly set on sensor. */ @@ -263,7 +291,7 @@ bool set_config(const std::string& hostname, const sensor_config& config, * * @return the port number. */ -int get_lidar_port(client& cli); +int get_lidar_port(const client& cli); /** * Return the port used to listen for imu UDP data. @@ -272,7 +300,7 @@ int get_lidar_port(client& cli); * * @return the port number. */ -int get_imu_port(client& cli); +int get_imu_port(const client& cli); /** * Check if ip address in multicast range. diff --git a/ouster_client/include/ouster/impl/client_poller.h b/ouster_client/include/ouster/impl/client_poller.h new file mode 100644 index 00000000..28a9221e --- /dev/null +++ b/ouster_client/include/ouster/impl/client_poller.h @@ -0,0 +1,70 @@ +/** + * Copyright (c) 2023, Ouster, Inc. + * All rights reserved. + */ + +#pragma once + +namespace ouster { +namespace sensor { +namespace impl { + +/** + * Poller used in multiclient scenarios + */ +struct client_poller; + +/** + * produces uninitialized poller + */ +std::shared_ptr make_poller(); + +/** + * Reset poller. Must be called prior to any other operations + * + * @param[in] poller client_poller to reset + */ +void reset_poll(client_poller& poller); + +/** + * Set poller to watch client on the next poll call + * + * @param[in] poller client_poller + * @param[in] cli client to watch + */ +void set_poll(client_poller& poller, const client& cli); + +/** + * Polls clients previously set with `set_poll` + * + * @param[in] poller client_poller + * @param[in] timeout_sec timeout in seconds + * + * @return -1 for error, 0 for timeout, otherwise number of messages received + */ +int poll(client_poller& poller, int timeout_sec = 1); + +/** + * Retrieves error state of the poller + * + * @param[in] poller client_poller + * + * @return client_state which is one of CLIENT_ERROR or EXIT on error, + * otherwise returning TIMEOUT if no error occurred + */ +client_state get_error(const client_poller& poller); + +/** + * Retrieve poll results for particular client + * + * @param[in] poller client_poller + * @param[in] cli client to retrieve results for + * + * @return client_state comprising of either LIDAR_DATA or IMU_DATA, or TIMEOUT + * if no data was received + */ +client_state get_poll(const client_poller& poller, const client& cli); + +} // namespace impl +} // namespace sensor +} // namespace ouster diff --git a/ouster_client/include/ouster/impl/lidar_scan_impl.h b/ouster_client/include/ouster/impl/lidar_scan_impl.h index b2f242d0..9224b882 100644 --- a/ouster_client/include/ouster/impl/lidar_scan_impl.h +++ b/ouster_client/include/ouster/impl/lidar_scan_impl.h @@ -312,6 +312,22 @@ struct copy_and_cast { } }; +/** + * Zeros fields in LidarScans + */ +struct zero_field { + /** + * Zeros the field dest. + * + * @tparam T The type of data inside of the eigen array. + * @param[in,out] field_dest The field to zero. + */ + template + void operator()(Eigen::Ref> field_dest) { + field_dest.setZero(); + } +}; + /** * Checks whether RAW_HEADERS field is present and can be used to store headers. 
* @@ -326,7 +342,7 @@ bool raw_headers_enabled(const sensor::packet_format& pf, const LidarScan& ls); template void scan_to_packets(const LidarScan& ls, const ouster::sensor::impl::packet_writer& pw, - OutputItT iter) { + OutputItT iter, uint32_t init_id, uint64_t prod_sn) { int total_packets = ls.packet_timestamp().size(); auto columns_per_packet = pw.columns_per_packet; @@ -361,6 +377,8 @@ void scan_to_packets(const LidarScan& ls, packet.host_timestamp = ls.packet_timestamp()[p_id]; pw.set_frame_id(lidar_buf, frame_id); + pw.set_init_id(lidar_buf, init_id); + pw.set_prod_sn(lidar_buf, prod_sn); bool any_valid = false; for (int icol = 0; icol < columns_per_packet; ++icol) { diff --git a/ouster_client/src/netcompat.h b/ouster_client/include/ouster/impl/netcompat.h similarity index 100% rename from ouster_client/src/netcompat.h rename to ouster_client/include/ouster/impl/netcompat.h diff --git a/ouster_client/include/ouster/impl/packet_writer.h b/ouster_client/include/ouster/impl/packet_writer.h index b9656140..63652fd4 100644 --- a/ouster_client/include/ouster/impl/packet_writer.h +++ b/ouster_client/include/ouster/impl/packet_writer.h @@ -32,6 +32,8 @@ class packet_writer : public packet_format { void set_col_timestamp(uint8_t* col_buf, uint64_t ts) const; void set_col_measurement_id(uint8_t* col_buf, uint16_t m_id) const; void set_frame_id(uint8_t* lidar_buf, uint32_t frame_id) const; + void set_init_id(uint8_t* lidar_buf, uint32_t init_id) const; + void set_prod_sn(uint8_t* lidar_buf, uint64_t sn) const; template void set_px(uint8_t* px_buf, ChanField i, T value) const; diff --git a/ouster_client/include/ouster/impl/ring_buffer.h b/ouster_client/include/ouster/impl/ring_buffer.h new file mode 100644 index 00000000..7a35b4e3 --- /dev/null +++ b/ouster_client/include/ouster/impl/ring_buffer.h @@ -0,0 +1,282 @@ +/** + * Copyright (c) 2023, Ouster, Inc. + * All rights reserved. + */ + +#pragma once + +#include +#include +#include +#include +#include + +namespace ouster { +namespace sensor { +namespace impl { + +/** + * Ring buffer class for internal use. + * + * This is NOT thread safe, thread safety is delegated to the user. + * Correct read/write procedure is: + * \code + * auto rb = RingBuffer{size, T{...}}; + * + * // write + * if (!rb.full()) { + * T& element = rb.back(); + * do_write(element); + * rb.push(); + * } + * + * // read + * if (!rb.empty()) { + * T& element = rb.front(); + * do_read(element); + * rb.pop(); + * } + * + * \endcode + */ +template +class RingBuffer { + static_assert(std::is_copy_constructible::value, + "must be copy constructible"); + + std::atomic r_idx_, w_idx_; + std::vector bufs_; + + size_t _capacity() const { return bufs_.size(); } + + public: + RingBuffer(size_t size, T value = {}) + : r_idx_(0), w_idx_(0), bufs_(size + 1, value) {} + + RingBuffer(RingBuffer&& other) { + std::swap(bufs_, other.bufs_); + r_idx_ = other.r_idx_.load(); + w_idx_ = other.w_idx_.load(); + } + + /** + * Report the total capacity of allocated elements. + */ + size_t capacity() const { return _capacity() - 1; } + + /** + * Report the size of currently used elements. + */ + size_t size() const { + return (_capacity() + w_idx_ - r_idx_) % _capacity(); + } + + /** + * Check whether ring buffer is empty. 
+ * + * NOTE: + * \code + * if (!rb.empty()) { + * // inside the block, rb.empty() is not guaranteed to stay the same + * // *unless* it is called by the *only* thread advancing read + * // indices to this buffer + * } + * \endcode + */ + bool empty() const { return w_idx_ == r_idx_; } + + /** + * Check whether ring buffer is empty. + * + * NOTE: + * \code + * if (!rb.full()) { + * // inside the block, rb.full() is not guaranteed to stay the same + * // *unless* it is called by the *only* thread advancing write + * // indices to this buffer + * } + * \endcode + */ + bool full() const { return r_idx_ == ((w_idx_ + 1) % _capacity()); } + + /** + * Get element at the front of the ring buffer. + */ + T& front() { return bufs_[r_idx_]; } + const T& front() const { return bufs_[r_idx_]; } + + /** + * Get element at the back of the ring buffer. + */ + T& back() { return bufs_[w_idx_]; } + const T& back() const { return bufs_[w_idx_]; } + + /** + * Flush the ring buffer, making it empty. + */ + void flush() { r_idx_ = w_idx_.load(); }; + + /** + * Atomically increment read index. + * + * Throws if ring buffer is empty. + */ + void pop() { + if (empty()) throw std::underflow_error("popped an empty ring buffer"); + size_t read_idx = r_idx_.load(); + while (!r_idx_.compare_exchange_strong(read_idx, + (read_idx + 1) % _capacity())) { + } + } + + /** + * Atomically increment write index. + * + * Throws if ring buffer is full. + */ + void push() { + if (full()) throw std::overflow_error("pushed a full ring buffer"); + size_t write_idx = r_idx_.load(); + // atomic increment modulo + while (!w_idx_.compare_exchange_strong(write_idx, + (write_idx + 1) % _capacity())) { + } + } +}; + +/** + * Convenience class for working with multiple ring buffers. + */ +template +class RingBufferMap { + using MapBuffers = std::unordered_map>; + MapBuffers rb_map_; + + public: + using MapInputs = std::unordered_map>; + + RingBufferMap() {} + + RingBufferMap(const MapInputs& inputs) : rb_map_{} { + for (const auto& pair : inputs) { + allocate(pair.first, pair.second.first, pair.second.second); + } + } + + /** + * Allocate a new ring buffer. + * + * @param[in] key key to allocate with + * @param[in] size size, in elements, to allocate + * @param[in] value default value of elements in newly allocated ring buffer + */ + void allocate(K key, size_t size, V value) { + if (rb_map_.find(key) != rb_map_.end()) { + throw std::invalid_argument( + "RingBufferMap: failed allocating a ring buffer, key already " + "exists"); + } + + rb_map_.emplace(key, RingBuffer{size, value}); + } + + /** + * Retrieve value at the front of the ring buffer at specified key. + */ + V& front(const K& key) { return rb_map_.at(key).front(); } + const V& front(const K& key) const { return rb_map_.at(key).front(); } + + /** + * Retrieve value at the back of the ring buffer at specified key. + */ + V& back(const K& key) { return rb_map_.at(key).back(); } + const V& back(const K& key) const { return rb_map_.at(key).back(); } + + /** + * Advance read index of the ring buffer at specified key. + */ + void pop(const K& key) { rb_map_.at(key).pop(); } + + /** + * Advance write index of the ring buffer at specified key. + */ + void push(const K& key) { rb_map_.at(key).push(); } + + /** + * Check if the ring buffer at specified key is empty. + */ + bool empty(const K& key) const { return rb_map_.at(key).empty(); } + + /** + * Check if the ring buffer at specified key is full. 
+ */ + bool full(const K& key) const { return rb_map_.at(key).full(); } + + /** + * Report the capacity of the ring buffer at specified key. + */ + size_t capacity(const K& key) const { return rb_map_.at(key).capacity(); } + + /** + * Report the current size of the ring buffer at specified key. + */ + size_t size(const K& key) const { return rb_map_.at(key).size(); } + + /** + * Flush an internal buffer at specified key. + */ + void flush(const K& key) { return rb_map_.at(key).flush(); } + + /** + * Flush all internal buffers. + */ + void flush() { + for (auto& kv : rb_map_) kv.second.flush(); + } + + /** + * Check if any one of the internal buffers is full. + */ + bool any_full() const { + return std::any_of(rb_map_.begin(), rb_map_.end(), + [](const auto& kv) { return kv.second.full(); }); + } + + /** + * Check if any one of the internal buffers is empty. + */ + bool any_empty() const { + return std::any_of(rb_map_.begin(), rb_map_.end(), + [](const auto& kv) { return kv.second.empty(); }); + } + + /** + * Reports total amount of currently stored packets in internal buffers. + * + * NOTE: this is not a great metric since it does not report specific + * buffers, but the total amount instead. + */ + size_t size() const { + return std::accumulate(rb_map_.begin(), rb_map_.end(), size_t{0}, + [](size_t total, const auto& kv) { + return total + kv.second.size(); + }); + } + + /** + * Reports total allocated capacity of packets stored in internal buffers. + * + * NOTE: this is not a great metric since it does not report specific + * buffers, but the total amount instead. + */ + size_t capacity() const { + return std::accumulate(rb_map_.begin(), rb_map_.end(), size_t{0}, + [](size_t total, const auto& kv) { + return total + kv.second.capacity(); + }); + } +}; + +} // namespace impl +} // namespace sensor +} // namespace ouster diff --git a/ouster_client/include/ouster/lidar_scan.h b/ouster_client/include/ouster/lidar_scan.h index 64055853..a2c95e11 100644 --- a/ouster_client/include/ouster/lidar_scan.h +++ b/ouster_client/include/ouster/lidar_scan.h @@ -93,7 +93,7 @@ class LidarScan { * @warning Members variables: use with caution, some of these will become * private. */ - int32_t frame_id{-1}; + int64_t frame_id{-1}; using FieldIter = decltype(field_types_)::const_iterator; ///< An STL Iterator of the @@ -105,8 +105,8 @@ class LidarScan { /** * Initialize a scan with fields configured for the LEGACY udp profile. * - * @param[in] w horizontal resoulution, i.e. the number of measurements per - * scan. + * @param[in] w horizontal resolution, i.e. the number of measurements per + * scan. * @param[in] h vertical resolution, i.e. the number of channels. * * Note, the number of columns per packet is set to the default @@ -117,10 +117,12 @@ class LidarScan { /** * Initialize a scan with the default fields for a particular udp profile. * - * @param[in] w horizontal resoulution, i.e. the number of measurements per - * scan. + * @param[in] w horizontal resolution, i.e. the number of measurements per + * scan. * @param[in] h vertical resolution, i.e. the number of channels. * @param[in] profile udp profile. + * @param[in] columns_per_packet The number of columns per packet, + * this argument is optional. */ LidarScan(size_t w, size_t h, sensor::UDPProfileLidar profile, size_t columns_per_packet = DEFAULT_COLUMNS_PER_PACKET); @@ -131,10 +133,12 @@ class LidarScan { * @tparam Iterator A standard template iterator for the custom fields. * * @param[in] w horizontal resoulution, i.e. 
the number of measurements per - * scan. + * scan. * @param[in] h vertical resolution, i.e. the number of channels. * @param[in] begin begin iterator of pairs of channel fields and types. * @param[in] end end iterator of pairs of channel fields and types. + * @param[in] columns_per_packet The number of columns per packet, + * this argument is optional. */ template LidarScan(size_t w, size_t h, Iterator begin, Iterator end, @@ -148,17 +152,30 @@ class LidarScan { */ LidarScan(const LidarScan& other); + /** + * Initialize a lidar scan from another with only the indicated fields. + * Casts, zero pads or removes fields from the original scan if necessary. + * + * @param[in] other The other lidar scan to initialize from. + * @param[in] fields Fields to have in new lidar scan. + */ + LidarScan(const LidarScan& other, const LidarScanFieldTypes& fields); + /** @copydoc LidarScan(const LidarScan& other) */ LidarScan(LidarScan&& other); /** - * Copy via Move semantic. + * Copy. * * @param[in] other The lidar scan to copy from. */ LidarScan& operator=(const LidarScan& other); - /** @copydoc operator=(const LidarScan& other) */ + /** + * Copy via Move semantic. + * + * @param[in] other The lidar scan to copy from. + */ LidarScan& operator=(LidarScan&& other); /** @@ -177,6 +194,7 @@ class LidarScan { sensor::ThermalShutdownStatus thermal_shutdown() const; /** + * @defgroup ClientLidarScanField Access fields in a lidar scan * Access a lidar data field. * * @throw std::invalid_argument if T does not match the runtime field type. @@ -189,11 +207,17 @@ class LidarScan { * * @return a view of the field data. */ + + /** + * @copydoc ClientLidarScanField + */ template ::value, T>::type = 0> Eigen::Ref> field(sensor::ChanField f); - /** @copydoc field(Field f) */ + /** + * @copydoc ClientLidarScanField + */ template ::value, T>::type = 0> Eigen::Ref> field(sensor::ChanField f) const; @@ -239,6 +263,13 @@ class LidarScan { */ Eigen::Ref> packet_timestamp() const; + /** + * Return the first valid packet timestamp + * + * @return the first valid packet timestamp, 0 if none available + */ + uint64_t get_first_valid_packet_timestamp() const; + /** * Access the measurement id headers. * diff --git a/ouster_client/include/ouster/sensor_http.h b/ouster_client/include/ouster/sensor_http.h index 8e536f05..1430f623 100644 --- a/ouster_client/include/ouster/sensor_http.h +++ b/ouster_client/include/ouster/sensor_http.h @@ -122,6 +122,8 @@ class SensorHttp { * Retrieves sensor firmware version information as a string. * * @param[in] hostname hostname of the sensor to communicate with. + * @param[in] timeout_sec The timeout to use in seconds, this argument + * is optional. */ static std::string firmware_version_string( const std::string& hostname, @@ -131,6 +133,8 @@ class SensorHttp { * Retrieves sensor firmware version information. * * @param[in] hostname hostname of the sensor to communicate with. + * @param[in] timeout_sec The timeout to use in seconds, this argument + * is optional. */ static ouster::util::version firmware_version( const std::string& hostname, @@ -140,6 +144,8 @@ class SensorHttp { * Creates an instance of the SensorHttp interface. * * @param[in] hostname hostname of the sensor to communicate with. + * @param[in] timeout_sec The timeout to use in seconds, this argument + * is optional. 
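+     *
+     * Usage sketch (the hostname below is illustrative):
+     * \code
+     * auto http = SensorHttp::create("os-122201234567.local");
+     * auto fw = SensorHttp::firmware_version("os-122201234567.local");
+     * \endcode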
*/ static std::unique_ptr create( const std::string& hostname, diff --git a/ouster_client/include/ouster/types.h b/ouster_client/include/ouster/types.h index ab31d611..a9bf6065 100644 --- a/ouster_client/include/ouster/types.h +++ b/ouster_client/include/ouster/types.h @@ -20,6 +20,7 @@ #include #include "nonstd/optional.hpp" +#include "version.h" namespace ouster { @@ -188,6 +189,35 @@ enum UDPProfileIMU { PROFILE_IMU_LEGACY = 1, ///< Legacy IMU data }; +/** Full scale range for IMU data. */ +enum FullScaleRange { + /** Higher precision lower range measurement mode */ + FSR_NORMAL = 0, + + /** Lower precision higher range measurement mode */ + FSR_EXTENDED +}; + +/** Priority of returns for the lidar to output. + * Lidar can have more than 1 or 2 detected "returns". + * This indicates to the lidar which ones it should output. + * See sensor docs for more details. + */ +enum ReturnOrder { + /** Lidar returns the strongest returns first */ + ORDER_STRONGEST_TO_WEAKEST = 0, + + /** Lidar returns the furthest returns first */ + ORDER_FARTHEST_TO_NEAREST, + + /** Lidar returns the nearest returns first */ + ORDER_NEAREST_TO_FARTHEST, + + /** DEPRECATED: Only Present In Old Test Firmware */ + ORDER_DEPRECATED_STRONGEST_RETURN_FIRST, + ORDER_DEPRECATED_LAST_RETURN_FIRST +}; + /** Thermal Shutdown status. */ enum ThermalShutdownStatus { THERMAL_SHUTDOWN_NORMAL = 0x00, ///< Normal operation @@ -359,6 +389,29 @@ struct sensor_config { * Refer to UDPProfileIMU for more details. */ optional udp_profile_imu; + + /** + * The gyro full scale measurement range to use. + * Refer to FullScaleRange for more details. + */ + optional gyro_fsr; + + /** + * The accelerometer full scale measurement range to use. + * Refer to FullScaleRange for more details. + */ + optional accel_fsr; + + /** + * The priority of returns for the lidar to output. + * Refer to ReturnOrder for more details. + */ + optional return_order; + + /** + * The minimum detection range of the lidar in cm. + */ + optional min_range_threshold_cm; }; /** Stores data format information. */ @@ -438,7 +491,7 @@ struct sensor_info { /* Return an updated version of the metadata string reflecting any * changes to the sensor_info. * Errors out if changes are incompatible but does not check for validity */ - std::string updated_metadata_string(); + std::string updated_metadata_string() const; bool has_fields_equal(const sensor_info& other) const; @@ -599,7 +652,7 @@ std::string to_string(OperatingMode mode); /** * Get operating mode from string. * - * @param s String to get the operating mode from. + * @param[in] s String to get the operating mode from. * * @return operating mode corresponding to the string, or 0 on error. */ @@ -706,11 +759,49 @@ std::string to_string(UDPProfileIMU profile); */ optional udp_profile_imu_of_string(const std::string& s); +/** + * Get full scale range setting from string + * + * @param[in] s The string to decode into a full scale range. + * + * @return full scale range corresponding to the string, or nullopt on error. + */ +optional full_scale_range_of_string(const std::string& s); + +/** + * Get return order setting from string + * + * @param[in] s The string to decode into a return order. + * + * @return return order corresponding to the string, or nullopt on error. + */ +optional return_order_of_string(const std::string& s); + +/** + * Get string representation of a Return Order. + * + * @param[in] return_order The return order to get the string + * representation of. 
+ * + * @return string representation of the return order. + */ +std::string to_string(ReturnOrder return_order); + +/** + * Get string representation of a Full Scale Range. + * + * @param[in] full_scale_range The shot limiting status to get the string + * representation of. + * + * @return string representation of the full scale range. + */ +std::string to_string(FullScaleRange full_scale_range); + /** * Get string representation of a Shot Limiting Status. * * @param[in] shot_limiting_status The shot limiting status to get the string - * representation of. + * representation of. * * @return string representation of the shot limiting status. */ @@ -720,7 +811,7 @@ std::string to_string(ShotLimitingStatus shot_limiting_status); * Get string representation of Thermal Shutdown Status. * * @param[in] thermal_shutdown_status The thermal shutdown status to get the - * string representation of. + * string representation of. * * @return string representation of thermal shutdown status. */ @@ -743,7 +834,8 @@ void check_signal_multiplier(const double signal_multiplier); * * @param[in] metadata a text blob returned by get_metadata from client.h. * @param[in] skip_beam_validation whether to skip validation on metdata - not - * for use on recorded data or metadata from sensors + * for use on recorded data or metadata + * from sensors * * @return a sensor_info struct populated with a subset of the metadata. */ @@ -757,7 +849,8 @@ sensor_info parse_metadata(const std::string& metadata, * * @param[in] json_file path to a json file containing sensor metadata. * @param[in] skip_beam_validation whether to skip validation on metadata - not - * for use on recorded data or metadata from sensors + * for use on recorded data or metadata + * from sensors * * @return a sensor_info struct populated with a subset of the metadata. */ @@ -815,7 +908,7 @@ std::string convert_to_legacy(const std::string& metadata); * Get a string representation of sensor calibration. Only set fields will be * represented. * - * @param[in] calibraiton a struct of calibration. + * @param[in] cal a struct of calibration. * * @return string representation of sensor calibration. */ @@ -829,6 +922,16 @@ std::string to_string(const calibration_status& cal); */ std::string client_version(); +/** + * Get version information from the metadata. + * + * @param[in] metadata string. + * + * @return version corresponding to the string, or invalid_version on error. + */ +ouster::util::version firmware_version_from_metadata( + const std::string& metadata); + // clang-format off /** Tag to identitify a paricular value reported in the sensor channel data * block. */ @@ -954,6 +1057,8 @@ class packet_format { const size_t col_size; const size_t packet_footer_size; + const uint64_t max_frame_id; + /** * Read the packet type packet header. * @@ -1253,9 +1358,13 @@ class packet_format { int field_bitness(ChanField f) const; }; +/** @defgroup OusterClientTypeGetFormat Get Packet Format functions */ + /** * Get a packet parser for a particular data format. * + * @ingroup OusterClientTypeGetFormat + * * @param[in] info parameters provided by the sensor. * * @return a packet_format suitable for parsing UDP packets sent by the sensor. @@ -1265,6 +1374,8 @@ const packet_format& get_format(const sensor_info& info); /** * Get a packet parser for a particular data format. 
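+ *
+ * For example (a sketch; the profile, channel count and column count are
+ * illustrative), a parser could be obtained as
+ * \code
+ * const auto& pf = get_format(PROFILE_RNG19_RFL8_SIG16_NIR16, 128, 16);
+ * \endcode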
* + * @ingroup OusterClientTypeGetFormat + * * @param[in] udp_profile_lidar lidar profile * @param[in] pixels_per_column pixels per column * @param[in] columns_per_packet columns per packet @@ -1282,7 +1393,12 @@ struct Packet { uint64_t host_timestamp; std::vector buf; - Packet(int size = 65536) : host_timestamp{0}, buf(size) {} + Packet(int size = 65536) : host_timestamp{0} { + // this is necessary due to how client works - it may read size() + 1 + // bytes into the packet in case of rogue packet coming through + buf.reserve(size + 1); + buf.resize(size, 0); + } template PacketType& as() { diff --git a/ouster_client/include/ouster/udp_packet_source.h b/ouster_client/include/ouster/udp_packet_source.h new file mode 100644 index 00000000..15e2a17a --- /dev/null +++ b/ouster_client/include/ouster/udp_packet_source.h @@ -0,0 +1,622 @@ +/** + * Copyright (c) 2023, Ouster, Inc. + * All rights reserved. + * + * @file + * @brief Wrapper around sensor::client to provide buffering + * + * *Not* a public API. Currently part of the Python bindings implementation. + * + * Maintains a single-producer / single-consumer circular buffer that can be + * populated by a thread without holding the GIL to deal the relatively small + * default OS buffer size and high sensor UDP data rate. Must be thread-safe to + * allow reading data without holding the GIL while other references to the + * client exist. + */ + +#pragma once + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "ouster/client.h" +#include "ouster/impl/ring_buffer.h" +#include "ouster/types.h" + +namespace ouster { +namespace sensor { +namespace impl { + +struct Event { + int source; + client_state state; + + bool operator==(const Event& other) const { + return source == other.source && state == other.state; + } +}; + +} // namespace impl +} // namespace sensor +} // namespace ouster + +namespace std { + +template <> +struct hash { + std::size_t operator()(const ouster::sensor::impl::Event& e) const { + auto h = std::hash{}; + return h(e.source) ^ h(static_cast(e.state)); + } +}; + +} // namespace std + +namespace ouster { +namespace sensor { +namespace impl { + +using EventSet = std::unordered_set; + +/** + * Thread safe event queue. + * + * Safe to use with many-to-many producers/consumers, although some + * considerations apply. + * + * Two main ways of usage are: one queue per multiple consumers, using + * next(events) to have consumers wait on specific events, or using + * multiple queues, one per consumer, as implemented in publisher/subscriber. + */ +class EventQueue { + mutable std::mutex m; + mutable std::condition_variable cv; + std::deque q; + + template + Event _next(Predicate&& p) { + Event e; + { + std::unique_lock lock{m}; + cv.wait(lock, p); + + e = q.front(); + q.pop_front(); + } + cv.notify_all(); + return e; + } + + template + Event _next_timeout(float sec, Predicate&& p) { + Event e; + { + std::unique_lock lock{m}; + using fsec = std::chrono::duration; + bool timeout = !cv.wait_for(lock, fsec{sec}, p); + + if (timeout) return {-1, client_state::TIMEOUT}; + + e = q.front(); + q.pop_front(); + } + cv.notify_all(); + return e; + } + + public: + /** + * Push an event to the back of the queue. + * + * Notifies all threads waiting on the queue. + * + * @param[in] e event + */ + void push(Event e) { + { + std::lock_guard lock{m}; + q.push_back(e); + } + cv.notify_all(); + } + + /** + * Push a [first,last) range of events to the back of the queue. 
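+     *
+     * For example (sketch; ``queue`` is an EventQueue instance):
+     * \code
+     * std::vector<Event> events = {{0, client_state::LIDAR_DATA},
+     *                              {0, client_state::IMU_DATA}};
+     * queue.push(events.begin(), events.end());
+     * \endcode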
+ * + * Notifies all threads waiting on the queue. + * + * @param[in] first iterator to the first event + * @param[in] last past-the-end iterator + */ + template + void push(EventIterT first, EventIterT last) { + { + std::lock_guard lock{m}; + q.insert(q.end(), first, last); + } + cv.notify_all(); + } + + /** + * Push an event to the front of the queue. + * + * Notifies all threads waiting on the queue. + * + * @param[in] e event + */ + void push_priority(Event e) { + { + std::lock_guard lock{m}; + q.push_front(e); + } + cv.notify_all(); + } + + /** + * Pop an event from the front of the queue. + * If the queue is empty, blocks until an event is pushed. + * + * Notifies all other threads waiting on the queue. + * + * @return event + */ + Event pop() { + return _next([this] { return !q.empty(); }); + } + + /** + * Pop an event from the front of the queue. + * If the queue is empty, blocks until an event is pushed or timeout_sec + * seconds have passed + * + * Notifies all other threads waiting on the queue. + * + * @param[in] timeout_sec timeout time in seconds + * @return event or Event{-1, client_state::TIMEOUT} in case of timeout + */ + Event pop(float timeout_sec) { + return _next_timeout(timeout_sec, [this] { return !q.empty(); }); + } + + /** + * Pop an event of the set of events from the front of the queue. + * Blocks until a suitable event is at the front of the queue. + * + * Notifies all other threads waiting on the queue. + * + * @param[in] events subset of events to wait for + * @return event + */ + Event next(const EventSet& events) { + return _next([this, &events] { + return !q.empty() && events.count(q.front()) == 1; + }); + } + + /** + * Pop an event of the set of events from the front of the queue. + * Blocks until a suitable event is at the front of the queue, or + * timeout_sec seconds have passed. + * + * Notifies all other threads waiting on the queue. + * + * @param[in] timeout_sec timeout time in seconds + * @param[in] events subset of events to wait for + * @return event or Event{-1, client_state::TIMEOUT} in case of timeout + */ + Event next(float timeout_sec, const EventSet& events) { + return _next_timeout(timeout_sec, [this, &events] { + return !q.empty() && events.count(q.front()) == 1; + }); + } + + /** + * Flush the queue, immediately returning all elements. + * + * @return queue with all remaining events + */ + // TODO: [[nodiscard]] once we move to cpp17 -- Tim T. + std::deque flush() { + std::lock_guard lock{m}; + std::deque out; + out.swap(q); + return out; + } +}; + +class Publisher { + protected: + EventSet events_; + std::shared_ptr q_; + + public: + /** + * Construct publisher accepting a corresponding set of events + * + * @param[in] events set of events to accept + */ + Publisher(EventSet events) + : events_(std::move(events)), q_(std::make_shared()) {} + + /** + * Construct empty publisher with events to be set later + */ + Publisher() : Publisher(EventSet{}) {} + + /** + * Sets publisher to accept events of type `e` + * + * @param[in] e Event type to accept + */ + void set_accept(Event e) { events_.insert(e); } + + /** + * Checks whether publisher accepts event type. + * + * @param[in] e Event type + * @return true if publisher accepts events of type e + */ + bool accepts(Event e) const { return events_.count(e); } + + /** + * Publish event to the publisher queue. 
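+     *
+     * Events this publisher does not accept are dropped, e.g. (sketch):
+     * \code
+     * Publisher pub;
+     * pub.set_accept({0, client_state::LIDAR_DATA});
+     * pub.publish({0, client_state::LIDAR_DATA});  // queued
+     * pub.publish({0, client_state::IMU_DATA});    // not accepted, dropped
+     * \endcode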
+ * + * @param[in] e event + * @param[in] to_front if true, publishes event to the front of the queue + */ + void publish(Event e, bool to_front = false) { + if (accepts(e)) { + if (to_front) { + q_->push_priority(e); + } else { + q_->push(e); + } + } + } + + /** + * Retrieve internal event queue. + * + * Mainly used for constructing subscribers. + * + * @return shared pointer to EventQueue + */ + std::shared_ptr queue() { return q_; } +}; + +class Subscriber { + protected: + std::shared_ptr q_; + std::shared_ptr> rb_; + + static bool _has_packet(Event e) { + return e.state & (client_state::LIDAR_DATA | client_state::IMU_DATA); + } + + public: + Subscriber(std::shared_ptr q, + std::shared_ptr> rb) + : q_(q), rb_(rb) {} + + Subscriber(Subscriber&& other) : q_(), rb_() { + std::swap(q_, other.q_); + std::swap(rb_, other.rb_); + } + + /** + * Pop the next event from the queue. + * + * Blocks thread until event is available. + * + * @return event + */ + Event pop() { return q_->pop(); } + + /** + * Pop the next event from the queue. + * + * Blocks thread until event is available, or timeout is reached. + * + * @param[in] timeout_sec timeout in seconds + * @return event or {-1, client_state::TIMEOUT} + */ + Event pop(float timeout_sec) { return q_->pop(timeout_sec); } + + /** + * Retrieve the packet to the corresponding event. + * + * Packet is guaranteed to stay valid until advance() is called. + * Will throw if the event does not correspond to any packets. + * + * @param[in] e event + * @return packet corresponding to the event + */ + Packet& packet(Event e) { return rb_->front(e); } + const Packet& packet(Event e) const { return rb_->front(e); } + + /** + * Advance the ring buffer read index for the corresponding event. + */ + void advance(Event e) { + if (_has_packet(e)) rb_->pop(e); + } + + /** + * Flush the queue, releasing all corresponding packets from the ring buffer + */ + void flush() { + auto events = q_->flush(); + for (const auto& e : events) { + if (e.state == client_state::EXIT) { + // return exit event back to the queue for later processing + q_->push_priority(e); + } else { + advance(e); + } + } + } +}; + +class Producer { + protected: + std::vector> pubs_; + std::vector> clients_; + std::shared_ptr> rb_; + + std::mutex mtx_; + std::atomic stop_; + + bool _verify() const; + + public: + Producer() + : rb_(std::make_shared>()), stop_(false) {} + + // TODO: move out to client_state extensions of some sort + /* Extra bit flag compatible with client_state to signal buffer overflow. */ + static constexpr int CLIENT_OVERFLOW = 0x10; + + /** + * Add client and allocate buffers for it. + * + * @param[in] cli shared_ptr with initialized client + * @param[in] lidar_buf_size size of the lidar buffer, in packets + * @param[in] lidar_packet_size size of the lidar packet, in bytes + * @param[in] imu_buf_size size of the imu buffer, in packets + * @param[in] imu_packet_size size of the imu packet, in bytes + * @return id of the client used in produced events e.g. + * Event{id, client_state} + */ + int add_client(std::shared_ptr cli, size_t lidar_buf_size, + size_t lidar_packet_size, size_t imu_buf_size, + size_t imu_packet_size); + + /** + * Add client and allocate buffers for it. + * + * Calculates the buffer sizes for the client based on hz rate and provided + * seconds_to_buffer parameter. 
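+     *
+     * Wiring sketch (hostname, ports and buffering amount are illustrative;
+     * ``info`` is assumed to be the sensor_info of that sensor):
+     * \code
+     * Producer producer;
+     * auto cli = init_client("os-122201234567.local", 7502, 7503);
+     * int id = producer.add_client(cli, info, 2.0f);  // ~2 seconds of packets
+     * \endcode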
+ * + * @param[in] cli shared_ptr with initialized client + * @param[in] info sensor_info corresponding to the client + * @param[in] seconds_to_buffer amount of seconds worth of buffer allocation + * @return id of the client used in produced events e.g. + * Event{id, client_state} + */ + int add_client(std::shared_ptr cli, const sensor_info& info, + float seconds_to_buffer); + + /** + * Subscribe to a preassembled publisher. + * + * @param[in] pub shared_ptr containing preassembled publisher + * @return shared_ptr with subscriber corresponding to the publisher + */ + std::shared_ptr subscribe(std::shared_ptr pub); + + /** + * Subscribe to a specific set of events. + * + * @param[in] events set of events to subscribe to + * @return shared_ptr with subscriber waiting on the events + */ + std::shared_ptr subscribe(EventSet events); + + /** + * Write data from the network into the circular buffer, reporting events to + * publishers. + * + * Internally verifies that at least some publishers are subscribed to all + * of the events that could be reported by the producer, otherwise returns + * early. + * + * Will return when either shutdown() is called by one of the threads, or + * when CLIENT_ERROR or EXIT are reported by clients. + */ + void run(); + + /** + * Signal the producer to exit and reports EXIT event to all listening + * subscribers, then waits for producer thread to exit before returning. + * + * Additionally, clears all internal publishers and buffers. + */ + void shutdown(); + + /** + * Reports total amount of currently stored packets in internal buffers. + * + * NOTE: this is not a great metric since it does not report specific + * buffers, but the total amount instead. + */ + size_t size() const { return rb_->size(); } + + /** + * Reports total allocated capacity of packets stored in internal buffers. + * + * NOTE: this is not a great metric since it does not report specific + * buffers, but the total amount instead. + */ + size_t capacity() const { return rb_->capacity(); } +}; + +class UDPPacketSource : protected Producer, protected Subscriber { + void _accept_client_events(int id); + + public: + UDPPacketSource(); + + /** + * Add client and allocate buffers for it. + * + * @param[in] cli shared_ptr with initialized client + * @param[in] lidar_buf_size size of the lidar buffer, in packets + * @param[in] lidar_packet_size size of the lidar packet, in bytes + * @param[in] imu_buf_size size of the imu buffer, in packets + * @param[in] imu_packet_size size of the imu packet, in bytes + * @return id of the client used in produced events e.g. + * Event{id, client_state} + */ + void add_client(std::shared_ptr cli, size_t lidar_buf_size, + size_t lidar_packet_size, size_t imu_buf_size, + size_t imu_packet_size); + + /** + * Add client and allocate buffers for it. + * + * Calculates the buffer sizes for the client based on hz rate and provided + * seconds_to_buffer parameter. + * + * @param[in] cli shared_ptr with initialized client + * @param[in] info sensor_info corresponding to the client + * @param[in] seconds_to_buffer amount of seconds worth of buffer allocation + * @return id of the client used in produced events e.g. 
+ * Event{id, client_state} + */ + void add_client(std::shared_ptr cli, const sensor_info& info, + float seconds_to_buffer); + + using Producer::capacity; + using Producer::shutdown; + using Producer::size; + void produce() { Producer::run(); } + + using Subscriber::advance; + using Subscriber::flush; + using Subscriber::packet; + using Subscriber::pop; +}; + +class BufferedUDPSource : protected Producer, protected Subscriber { + BufferedUDPSource(); + + public: + /** + * Listen for sensor data using client + * + * @param[in] client externally created client + * @param[in] lidar_buf_size size of the lidar buffer, in packets + * @param[in] lidar_packet_size size of the lidar packet, in bytes + * @param[in] imu_buf_size size of the imu buffer, in packets + * @param[in] imu_packet_size size of the imu packet, in bytes + */ + BufferedUDPSource(std::shared_ptr client, size_t lidar_buf_size, + size_t lidar_packet_size, size_t imu_buf_size, + size_t imu_packet_size); + + /** + * Listen for sensor data using client + * + * Calculates the buffer sizes for the client based on hz rate and provided + * seconds_to_buffer parameter. + * + * @param[in] client externally created client + * @param[in] info sensor_info corresponding to the client + * @param[in] seconds_to_buffer amount of seconds worth of buffer allocation + */ + BufferedUDPSource(std::shared_ptr client, const sensor_info& info, + float seconds_to_buffer); + + using Producer::capacity; + using Producer::shutdown; + using Producer::size; + void produce() { Producer::run(); } + + using Subscriber::flush; + + /** + * Pop the next client_state from the queue. + * + * Blocks thread until client_state is available. + * + * @return client_state + */ + client_state pop() { return Subscriber::pop().state; } + + /** + * Pop the next client_state from the queue. + * + * Blocks thread until client_state is available, or timeout is reached. + * + * @param[in] timeout_sec timeout in seconds + * @return client_state or client_state::TIMEOUT + */ + client_state pop(float timeout_sec) { + return Subscriber::pop(timeout_sec).state; + } + + /** + * Retrieve the packet to the corresponding client_state. + * + * Packet is guaranteed to stay valid until advance() is called. + * Will throw if the event does not correspond to any packets. + * + * @param[in] st client_state + * @return packet corresponding to st + */ + Packet& packet(client_state st) { return Subscriber::packet({0, st}); } + const Packet& packet(client_state st) const { + return Subscriber::packet({0, st}); + } + + /** + * Advances read in internal buffers + * + * @param[in] st client_state to advance. Does nothing if st is not one of + * LIDAR_DATA or IMU_DATA + */ + void advance(client_state st) { Subscriber::advance({0, st}); } + + /** + * Read next available packet in the buffer. + * + * If client_state returns LIDAR_DATA, submitted lidar packet will be + * populated, similarly if client_state returns IMU_DATA, submitted + * imu packet will be populated instead. + * + * Blocks if the queue is empty for up to `timeout_sec` (zero means wait + * forever). Should only be called by the consumer thread. If reading from + * the network was blocked because the buffer was full, the the + * CLIENT_OVERFLOW flag will be set on the next returned status. + * + * @param[in] lidarp lidar packet to read into + * @param[in] imup imu packet to read into + * @param[in] timeout_sec maximum time to wait for data. + * @return client status, see sensor::poll_client(). 
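+     *
+     * Consumer-loop sketch (assumes a constructed source and a packet_format
+     * ``pf`` matching the sensor):
+     * \code
+     * LidarPacket lidar_packet(pf.lidar_packet_size);
+     * ImuPacket imu_packet(pf.imu_packet_size);
+     * auto st = source.consume(lidar_packet, imu_packet, 1.0f);
+     * if (st & client_state::LIDAR_DATA) {
+     *     // lidar_packet was populated
+     * } else if (st & client_state::IMU_DATA) {
+     *     // imu_packet was populated
+     * }
+     * \endcode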
+ */ + client_state consume(LidarPacket& lidarp, ImuPacket& imup, + float timeout_sec); +}; + +} // namespace impl +} // namespace sensor +} // namespace ouster diff --git a/ouster_client/include/ouster/version.h b/ouster_client/include/ouster/version.h index c47854e9..fe162cab 100644 --- a/ouster_client/include/ouster/version.h +++ b/ouster_client/include/ouster/version.h @@ -109,7 +109,17 @@ std::string to_string(const version& v); * * @return version corresponding to the string, or invalid_version on error. */ -version version_of_string(const std::string& s); +[[deprecated("Use version_from_string instead")]] version version_of_string( + const std::string& s); + +/** + * Get version from string. + * + * @param[in] ver string. + * + * @return version corresponding to the string, or invalid_version on error. + */ +version version_from_string(const std::string& ver); } // namespace util } // namespace ouster diff --git a/ouster_client/src/buffered_udp_source.cpp b/ouster_client/src/buffered_udp_source.cpp deleted file mode 100644 index db3e474b..00000000 --- a/ouster_client/src/buffered_udp_source.cpp +++ /dev/null @@ -1,240 +0,0 @@ -/** - * Copyright (c) 2021, Ouster, Inc. - * All rights reserved. - */ - -#include "ouster/buffered_udp_source.h" - -#include -#include -#include -#include -#include - -#include "ouster/client.h" -#include "ouster/types.h" - -/* - * Locks are not held during the actual buffer reads and writes: thread safety - * w.r.t the producer relies on the invariants that only the consumer modifies - * the read index and only the producer modifies the write index, always while - * holding cv_mtx_ to make sure that cv notifications are not lost between - * checking the empty/full condition and entering the waiting state. - */ -namespace ouster { -namespace sensor { -namespace impl { - -using fsec = std::chrono::duration; - -/* - * Initialize the internal circular buffer. - * - * NOTE: capacity_ is the capacity of the internal buffer vector, not the - * max number of buffered packets (which is one less). 
- */ -BufferedUDPSource::BufferedUDPSource(size_t buf_size) - : capacity_{buf_size + 1} { - std::generate_n(std::back_inserter(bufs_), capacity_, [&] { - return std::make_pair(client_state::CLIENT_ERROR, Packet()); - }); -} - -BufferedUDPSource::BufferedUDPSource(const std::string& hostname, - int lidar_port, int imu_port, - size_t buf_size) - : BufferedUDPSource(buf_size) { - cli_ = init_client(hostname, lidar_port, imu_port); - if (!cli_) throw std::runtime_error("Failed to initialize client"); - lidar_port_ = sensor::get_lidar_port(*cli_); - imu_port_ = sensor::get_imu_port(*cli_); -} - -BufferedUDPSource::BufferedUDPSource(const std::string& hostname, - const std::string& udp_dest_host, - lidar_mode mode, timestamp_mode ts_mode, - int lidar_port, int imu_port, - int timeout_sec, size_t buf_size) - : BufferedUDPSource(buf_size) { - cli_ = init_client(hostname, udp_dest_host, mode, ts_mode, lidar_port, - imu_port, timeout_sec); - if (!cli_) throw std::runtime_error("Failed to initialize client"); - lidar_port_ = sensor::get_lidar_port(*cli_); - imu_port_ = sensor::get_imu_port(*cli_); -} - -std::string BufferedUDPSource::get_metadata(int timeout_sec, - bool legacy_format) { - std::unique_lock lock(cli_mtx_, std::try_to_lock); - if (!lock.owns_lock()) - throw std::invalid_argument( - "Another thread is already using the client"); - if (!cli_) throw std::invalid_argument("Client has already been shut down"); - return sensor::get_metadata(*cli_, timeout_sec, legacy_format); -} - -/* - * Invariant: nothing can access cli_ when stop_ is true. Producer will - * release _cli_mtx_ only when it exits the loop. - */ -void BufferedUDPSource::shutdown() { - { - std::unique_lock lock{cv_mtx_}; - if (stop_) return; - stop_ = true; - } - cv_.notify_all(); - - // close UDP sockets when any producer has exited - std::lock_guard cli_lock{cli_mtx_}; - cli_.reset(); -} - -/* - * Advance the read index to drop data. Can only be called by the consumer to - * maintain the invariant that only the reader modifies the read index. - */ -void BufferedUDPSource::flush(size_t n_packets) { - { - std::unique_lock lock{cv_mtx_}; - auto sz = (capacity_ + write_ind_ - read_ind_) % capacity_; - auto n = (n_packets == 0) ? 
sz : std::min(sz, n_packets); - read_ind_ = (capacity_ + read_ind_ + n) % capacity_; - } - cv_.notify_one(); -} - -size_t BufferedUDPSource::size() { - std::unique_lock lock{cv_mtx_}; - return (capacity_ + write_ind_ - read_ind_) % capacity_; -} - -size_t BufferedUDPSource::capacity() { return (capacity_ - 1); } - -client_state BufferedUDPSource::consume(uint8_t* buf, size_t buf_sz, - float timeout_sec) { - // wait for producer to wake us up if the queue is empty - { - std::unique_lock lock{cv_mtx_}; - bool timeout = !cv_.wait_for(lock, fsec{timeout_sec}, [this] { - return stop_ || write_ind_ != read_ind_; - }); - if (timeout) - return client_state::TIMEOUT; - else if (stop_) - return client_state::EXIT; - } - - // read data into buffer - auto sz = std::min(buf_sz, packet_size); - auto& e = bufs_[read_ind_]; - std::memcpy(buf, e.second.buf.data(), sz); // TODO: Law of Demeter - - // advance read ind and unblock producer, if necessary - { - std::unique_lock lock{cv_mtx_}; - read_ind_ = (read_ind_ + 1) % capacity_; - } - cv_.notify_one(); - return e.first; -} - -client_state BufferedUDPSource::consume(LidarPacket& lidarp, ImuPacket& imup, - float timeout_sec) { - // wait for producer to wake us up if the queue is empty - { - std::unique_lock lock{cv_mtx_}; - bool timeout = !cv_.wait_for(lock, fsec{timeout_sec}, [this] { - return stop_ || write_ind_ != read_ind_; - }); - if (timeout) - return client_state::TIMEOUT; - else if (stop_) - return client_state::EXIT; - } - - // read data into buffer - auto& e = bufs_[read_ind_]; - - auto write_packet = [&e](auto& packet) { - auto sz = std::min(packet.buf.size(), packet_size); - std::memcpy(packet.buf.data(), e.second.buf.data(), sz); - packet.host_timestamp = e.second.host_timestamp; - }; - - if (e.first & client_state::LIDAR_DATA) { - write_packet(lidarp); - } else if (e.first & client_state::IMU_DATA) { - write_packet(imup); - } - - // advance read ind and unblock producer, if necessary - { - std::unique_lock lock{cv_mtx_}; - read_ind_ = (read_ind_ + 1) % capacity_; - } - cv_.notify_one(); - return e.first; -} - -/* - * Hold the client mutex to protect client state and prevent multiple - * producers from running concurrently. - */ -void BufferedUDPSource::produce(const packet_format& pf) { - std::lock_guard cli_lock{cli_mtx_}; - - auto exit_mask = - client_state(client_state::CLIENT_ERROR | client_state::EXIT); - auto st = client_state(0); - - while (!(st & exit_mask)) { - // Wait for consumer to wake us up if the queue is full - bool overflow = false; - { - std::unique_lock lock{cv_mtx_}; - while (!stop_ && (write_ind_ + 1) % capacity_ == read_ind_) { - overflow = true; - cv_.wait(lock); - } - if (stop_) return; - } - - // Write data and status to circular buffer. EXIT and ERROR status - // are just passed through with stale data. - st = poll_client(*cli_); - if (st == client_state::TIMEOUT) continue; - - auto& e = bufs_[write_ind_]; - if (st & LIDAR_DATA) { - LidarPacket& packet = e.second.as(); - if (!read_lidar_packet(*cli_, packet, pf)) continue; - } else if (st & IMU_DATA) { - ImuPacket& packet = e.second.as(); - if (!read_imu_packet(*cli_, packet, pf)) continue; - } - if (overflow) st = client_state(st | CLIENT_OVERFLOW); - e.first = st; - - // Advance write ind and wake up consumer, if blocked - { - std::unique_lock lock{cv_mtx_}; - write_ind_ = (write_ind_ + 1) % capacity_; - } - cv_.notify_one(); - } -} - -int BufferedUDPSource::get_lidar_port() { - std::lock_guard lock{cv_mtx_}; - return stop_ ? 
0 : lidar_port_; -} - -int BufferedUDPSource::get_imu_port() { - std::lock_guard lock{cv_mtx_}; - return stop_ ? 0 : imu_port_; -} - -} // namespace impl -} // namespace sensor -} // namespace ouster diff --git a/ouster_client/src/client.cpp b/ouster_client/src/client.cpp index 42f57686..a5b21107 100644 --- a/ouster_client/src/client.cpp +++ b/ouster_client/src/client.cpp @@ -24,7 +24,8 @@ #include #include "logging.h" -#include "netcompat.h" +#include "ouster/impl/client_poller.h" +#include "ouster/impl/netcompat.h" #include "ouster/sensor_http.h" #include "ouster/types.h" @@ -425,8 +426,7 @@ bool init_logger(const std::string& log_level, const std::string& log_file_path, std::shared_ptr init_client(const std::string& hostname, int lidar_port, int imu_port) { logger().info( - "initializing sensor client: {} expecting lidar port/imu port: {}/{} " - "(0 means a random port will be chosen)", + "initializing sensor client: {} expecting lidar port/imu port: {}/{}", hostname, lidar_port, imu_port); auto cli = std::make_shared(); @@ -445,9 +445,10 @@ std::shared_ptr init_client(const std::string& hostname, const std::string& udp_dest_host, lidar_mode ld_mode, timestamp_mode ts_mode, int lidar_port, int imu_port, - int timeout_sec) { + int timeout_sec, bool persist_config) { auto cli = init_client(hostname, lidar_port, imu_port); if (!cli) return std::shared_ptr(); + logger().info("(0 means a random port will be chosen)"); // update requested ports to actual bound ports lidar_port = get_sock_port(cli->lidar_fd); @@ -466,6 +467,7 @@ std::shared_ptr init_client(const std::string& hostname, if (ts_mode) config.ts_mode = ts_mode; if (lidar_port) config.udp_port_lidar = lidar_port; if (imu_port) config.udp_port_imu = imu_port; + if (persist_config) config_flags |= CONFIG_PERSIST; config.operating_mode = OPERATING_NORMAL; set_config(hostname, config, config_flags); @@ -487,21 +489,23 @@ std::shared_ptr init_client(const std::string& hostname, std::shared_ptr mtp_init_client(const std::string& hostname, const sensor_config& config, const std::string& mtp_dest_host, - bool main, int timeout_sec) { + bool main, int timeout_sec, + bool persist_config) { + int lidar_port = config.udp_port_lidar ? config.udp_port_lidar.value() : 0; + int imu_port = config.udp_port_imu ? config.udp_port_imu.value() : 0; + auto udp_dest = config.udp_dest ? 
config.udp_dest.value() : ""; + logger().info( "initializing sensor client: {} expecting ports: {}/{}, multicast " "group: {} (0 means a random port will be chosen)", - hostname, config.udp_port_lidar.value(), config.udp_port_imu.value(), - config.udp_dest.value()); + hostname, lidar_port, imu_port, udp_dest); auto cli = std::make_shared(); cli->hostname = hostname; - cli->lidar_fd = mtp_data_socket(config.udp_port_lidar.value(), - config.udp_dest.value(), mtp_dest_host); + cli->lidar_fd = mtp_data_socket(lidar_port, udp_dest, mtp_dest_host); cli->imu_fd = mtp_data_socket( - config.udp_port_imu - .value()); // no need to join multicast group second time + imu_port); // no need to join multicast group second time if (!impl::socket_valid(cli->lidar_fd) || !impl::socket_valid(cli->imu_fd)) return std::shared_ptr(); @@ -515,6 +519,7 @@ std::shared_ptr mtp_init_client(const std::string& hostname, uint8_t config_flags = 0; if (lidar_port) config_copy.udp_port_lidar = lidar_port; if (imu_port) config_copy.udp_port_imu = imu_port; + if (persist_config) config_flags |= CONFIG_PERSIST; config_copy.operating_mode = OPERATING_NORMAL; set_config(hostname, config_copy, config_flags); @@ -534,33 +539,76 @@ std::shared_ptr mtp_init_client(const std::string& hostname, return cli; } -client_state poll_client(const client& c, const int timeout_sec) { +namespace impl { + +struct client_poller { fd_set rfds; - FD_ZERO(&rfds); - FD_SET(c.lidar_fd, &rfds); - FD_SET(c.imu_fd, &rfds); + SOCKET max_fd; + client_state err; +}; + +std::shared_ptr make_poller() { + return std::make_unique(); +} + +void reset_poll(client_poller& poller) { + FD_ZERO(&poller.rfds); + poller.max_fd = 0; + poller.err = client_state::TIMEOUT; +} + +void set_poll(client_poller& poller, const client& c) { + FD_SET(c.lidar_fd, &poller.rfds); + FD_SET(c.imu_fd, &poller.rfds); + poller.max_fd = std::max({poller.max_fd, c.lidar_fd, c.imu_fd}); +} +int poll(client_poller& poller, int timeout_sec) { timeval tv; tv.tv_sec = timeout_sec; tv.tv_usec = 0; - SOCKET max_fd = std::max(c.lidar_fd, c.imu_fd); - - SOCKET retval = select((int)max_fd + 1, &rfds, NULL, NULL, &tv); + SOCKET retval = + select((int)poller.max_fd + 1, &poller.rfds, NULL, NULL, &tv); - client_state res = client_state(0); + if (!impl::socket_valid(retval)) { + if (impl::socket_exit()) { + poller.err = client_state::EXIT; + } else { + logger().error("select: {}", impl::socket_get_error()); + poller.err = client_state::CLIENT_ERROR; + } - if (!impl::socket_valid(retval) && impl::socket_exit()) { - res = EXIT; - } else if (!impl::socket_valid(retval)) { - logger().error("select: {}", impl::socket_get_error()); - res = client_state(res | CLIENT_ERROR); - } else if (retval) { - if (FD_ISSET(c.lidar_fd, &rfds)) res = client_state(res | LIDAR_DATA); - if (FD_ISSET(c.imu_fd, &rfds)) res = client_state(res | IMU_DATA); + return -1; } - return res; + return (int)retval; +} + +client_state get_error(const client_poller& poller) { return poller.err; } + +client_state get_poll(const client_poller& poller, const client& c) { + client_state s = client_state(0); + + if (FD_ISSET(c.lidar_fd, &poller.rfds)) s = client_state(s | LIDAR_DATA); + if (FD_ISSET(c.imu_fd, &poller.rfds)) s = client_state(s | IMU_DATA); + + return s; +} + +} // namespace impl + +client_state poll_client(const client& c, const int timeout_sec) { + impl::client_poller poller; + impl::reset_poll(poller); + impl::set_poll(poller, c); + int res = impl::poll(poller, timeout_sec); + if (res <= 0) { + // covers TIMEOUT and error 
states + return impl::get_error(poller); + } else { + return impl::get_poll(poller, c); + } } static bool recv_fixed(SOCKET fd, void* buf, int64_t len) { @@ -578,38 +626,44 @@ static bool recv_fixed(SOCKET fd, void* buf, int64_t len) { return false; } +bool read_lidar_packet(const client& cli, uint8_t* buf, size_t bytes) { + return recv_fixed(cli.lidar_fd, buf, bytes); +} + bool read_lidar_packet(const client& cli, uint8_t* buf, const packet_format& pf) { - return recv_fixed(cli.lidar_fd, buf, pf.lidar_packet_size); + return read_lidar_packet(cli, buf, pf.lidar_packet_size); } -bool read_lidar_packet(const client& cli, LidarPacket& packet, - const packet_format& pf) { +bool read_lidar_packet(const client& cli, LidarPacket& packet) { auto now = std::chrono::high_resolution_clock::now(); packet.host_timestamp = std::chrono::duration_cast( now.time_since_epoch()) .count(); - return read_lidar_packet(cli, packet.buf.data(), pf); + return read_lidar_packet(cli, packet.buf.data(), packet.buf.size()); +} + +bool read_imu_packet(const client& cli, uint8_t* buf, size_t bytes) { + return recv_fixed(cli.imu_fd, buf, bytes); } bool read_imu_packet(const client& cli, uint8_t* buf, const packet_format& pf) { - return recv_fixed(cli.imu_fd, buf, pf.imu_packet_size); + return read_imu_packet(cli, buf, pf.imu_packet_size); } -bool read_imu_packet(const client& cli, ImuPacket& packet, - const packet_format& pf) { +bool read_imu_packet(const client& cli, ImuPacket& packet) { auto now = std::chrono::high_resolution_clock::now(); packet.host_timestamp = std::chrono::duration_cast( now.time_since_epoch()) .count(); - return read_imu_packet(cli, packet.buf.data(), pf); + return read_imu_packet(cli, packet.buf.data(), packet.buf.size()); } -int get_lidar_port(client& cli) { return get_sock_port(cli.lidar_fd); } +int get_lidar_port(const client& cli) { return get_sock_port(cli.lidar_fd); } -int get_imu_port(client& cli) { return get_sock_port(cli.imu_fd); } +int get_imu_port(const client& cli) { return get_sock_port(cli.imu_fd); } bool in_multicast(const std::string& addr) { return IN_MULTICAST(ntohl(inet_addr(addr.c_str()))); diff --git a/ouster_client/src/lidar_scan.cpp b/ouster_client/src/lidar_scan.cpp index 7efeb368..bf66eae4 100644 --- a/ouster_client/src/lidar_scan.cpp +++ b/ouster_client/src/lidar_scan.cpp @@ -172,6 +172,39 @@ LidarScan::LidarScan(size_t w, size_t h, LidarScanFieldTypes field_types, } } +LidarScan::LidarScan(const LidarScan& ls_src, + const LidarScanFieldTypes& field_types) + : timestamp_(ls_src.timestamp_), + packet_timestamp_(ls_src.packet_timestamp_), + measurement_id_(ls_src.measurement_id_), + status_(ls_src.status_), + pose_(ls_src.pose_), + field_types_(field_types), + w(ls_src.w), + h(ls_src.h), + frame_status(ls_src.frame_status), + frame_id(ls_src.frame_id) { + // Initialize fields + for (const auto& ft : field_types_) { + if (fields_.count(ft.first) > 0) + throw std::invalid_argument("Duplicated fields found"); + fields_[ft.first] = impl::FieldSlot{ft.second, static_cast(w), + static_cast(h)}; + } + + // Copy fields + for (const auto& ft : field_types) { + if (ls_src.field_type(ft.first)) { + ouster::impl::visit_field(*this, ft.first, + ouster::impl::copy_and_cast(), ls_src, + ft.first); + } else { + ouster::impl::visit_field(*this, ft.first, + ouster::impl::zero_field()); + } + } +} + LidarScan::LidarScan(size_t w, size_t h, sensor::UDPProfileLidar profile, size_t columns_per_packet) : LidarScan{w, h, impl::lookup_scan_fields(profile), columns_per_packet} {} @@ -239,6 +272,24 
@@ Eigen::Ref> LidarScan::packet_timestamp() return packet_timestamp_; } +uint64_t LidarScan::get_first_valid_packet_timestamp() const { + int total_packets = packet_timestamp().size(); + if (total_packets == 0) { + return 0; // prevent a divide by zero + } + int columns_per_packet = w / total_packets; + + for (int i = 0; i < total_packets; ++i) { + if (status() + .middleRows(i * columns_per_packet, columns_per_packet) + .unaryExpr([](uint32_t s) { return s & 1; }) + .any()) + return packet_timestamp()[i]; + } + + return 0; +} + Eigen::Ref> LidarScan::measurement_id() { return measurement_id_; } @@ -461,7 +512,6 @@ ScanBatcher::ScanBatcher(size_t w, const sensor::packet_format& pf) h(pf.pixels_per_column), next_valid_m_id(0), next_headers_m_id(0), - next_valid_packet_id(0), cache(pf.lidar_packet_size), cache_packet_ts(0), pf(pf) { @@ -714,7 +764,7 @@ bool ScanBatcher::operator()(const uint8_t* packet_buf, uint64_t packet_ts, this->operator()(cache.data(), cache_packet_ts, ls); } - const uint16_t f_id = pf.frame_id(packet_buf); + const uint64_t f_id = pf.frame_id(packet_buf); const bool raw_headers = impl::raw_headers_enabled(pf, ls); @@ -722,14 +772,13 @@ bool ScanBatcher::operator()(const uint8_t* packet_buf, uint64_t packet_ts, // expecting to start batching a new scan next_valid_m_id = 0; next_headers_m_id = 0; - next_valid_packet_id = 0; ls.frame_id = f_id; - + zero_header_cols(ls, 0, w); + ls.packet_timestamp().setZero(); const uint8_t f_thermal_shutdown = pf.thermal_shutdown(packet_buf); const uint8_t f_shot_limiting = pf.shot_limiting(packet_buf); ls.frame_status = frame_status(f_thermal_shutdown, f_shot_limiting); - - } else if (ls.frame_id == static_cast(f_id + 1)) { + } else if (ls.frame_id == ((f_id + 1) % (pf.max_frame_id + 1))) { // drop reordered packets from the previous frame return false; } else if (ls.frame_id != f_id) { @@ -746,14 +795,6 @@ bool ScanBatcher::operator()(const uint8_t* packet_buf, uint64_t packet_ts, field_type.first, end_m_id, w); } - zero_header_cols(ls, next_valid_m_id, w); - - // zero packet timestamp separately, since it's packet level data - ls.packet_timestamp() - .segment(next_valid_packet_id, - ls.packet_timestamp().rows() - next_valid_packet_id) - .setZero(); - // store packet buf and ts data to the cache for later processing std::memcpy(cache.data(), packet_buf, cache.size()); cache_packet_ts = packet_ts; @@ -767,13 +808,6 @@ bool ScanBatcher::operator()(const uint8_t* packet_buf, uint64_t packet_ts, const uint16_t packet_id = pf.col_measurement_id(col0_buf) / pf.columns_per_packet; if (packet_id < ls.packet_timestamp().rows()) { - if (packet_id >= next_valid_packet_id) { - // zeroing skipped packets timestamps - ls.packet_timestamp() - .segment(next_valid_packet_id, packet_id - next_valid_packet_id) - .setZero(); - next_valid_packet_id = packet_id + 1; - } ls.packet_timestamp()[packet_id] = packet_ts; } diff --git a/ouster_client/src/netcompat.cpp b/ouster_client/src/netcompat.cpp index 3c2d4731..4c43430f 100644 --- a/ouster_client/src/netcompat.cpp +++ b/ouster_client/src/netcompat.cpp @@ -3,7 +3,7 @@ * All rights reserved. 
*/ -#include "netcompat.h" +#include "ouster/impl/netcompat.h" #include diff --git a/ouster_client/src/parsing.cpp b/ouster_client/src/parsing.cpp index cd7f934b..a88180f1 100644 --- a/ouster_client/src/parsing.cpp +++ b/ouster_client/src/parsing.cpp @@ -9,6 +9,7 @@ #include #include #include +#include #include #include #include @@ -181,6 +182,8 @@ struct packet_format::Impl { size_t col_footer_size; size_t packet_footer_size; + uint64_t max_frame_id; + size_t col_size; size_t lidar_packet_size; @@ -202,6 +205,12 @@ struct packet_format::Impl { col_footer_size = legacy ? 4 : 0; packet_footer_size = legacy ? 0 : 32; + if (profile == UDPProfileLidar::PROFILE_FUSA_RNG15_RFL8_NIR8_DUAL) { + max_frame_id = std::numeric_limits::max(); + } else { + max_frame_id = std::numeric_limits::max(); + } + col_size = col_header_size + pixels_per_column * channel_data_size + col_footer_size; lidar_packet_size = packet_header_size + columns_per_packet * col_size + @@ -233,7 +242,8 @@ packet_format::packet_format(UDPProfileLidar udp_profile_lidar, col_header_size{impl_->col_header_size}, col_footer_size{impl_->col_footer_size}, col_size{impl_->col_size}, - packet_footer_size{impl_->packet_footer_size} { + packet_footer_size{impl_->packet_footer_size}, + max_frame_id{impl_->max_frame_id} { for (const auto& kv : impl_->fields) { field_types_.push_back({kv.first, kv.second.ty_tag}); } @@ -757,6 +767,128 @@ void packet_writer::set_frame_id(uint8_t* lidar_buf, uint32_t frame_id) const { std::memcpy(lidar_buf + 2, &f_id, sizeof(f_id)); } +// Helpers for weird sized ints +// TODO: generalise when/if we need other uintXX_t fractionals +class uint24_t { + protected: + uint8_t _internal[3]; + + public: + uint24_t() {} + + uint24_t(const uint32_t val) { *this = val; } + + uint24_t(const uint24_t& val) { *this = val; } + + operator uint32_t() const { + return (_internal[2] << 16) | (_internal[1] << 8) | (_internal[0] << 0); + } + + uint24_t& operator=(const uint24_t& input) { + _internal[0] = input._internal[0]; + _internal[1] = input._internal[1]; + _internal[2] = input._internal[2]; + + return *this; + } + + uint24_t& operator=(const uint32_t input) { + _internal[0] = ((unsigned char*)&input)[0]; + _internal[1] = ((unsigned char*)&input)[1]; + _internal[2] = ((unsigned char*)&input)[2]; + + return *this; + } +}; + +class uint40_t { + protected: + uint8_t _internal[5]; + + public: + uint40_t() {} + + uint40_t(const uint64_t val) { *this = val; } + + uint40_t(const uint40_t& val) { *this = val; } + + operator uint64_t() const { + return (((uint64_t)_internal[4]) << 32) | + (((uint64_t)_internal[3]) << 24) | + (((uint64_t)_internal[2]) << 16) | + (((uint64_t)_internal[1]) << 8) | + (((uint64_t)_internal[0]) << 0); + } + + uint40_t& operator=(const uint40_t& input) { + _internal[0] = input._internal[0]; + _internal[1] = input._internal[1]; + _internal[2] = input._internal[2]; + _internal[3] = input._internal[3]; + _internal[4] = input._internal[4]; + + return *this; + } + + uint40_t& operator=(const uint64_t input) { + _internal[0] = ((unsigned char*)&input)[0]; + _internal[1] = ((unsigned char*)&input)[1]; + _internal[2] = ((unsigned char*)&input)[2]; + _internal[3] = ((unsigned char*)&input)[3]; + _internal[4] = ((unsigned char*)&input)[4]; + + return *this; + } +}; + +#pragma pack(push, 1) +// Relevant parts of packet headers as described/named in sensor documentation +struct FUSAHeader { + uint8_t packet_type; + uint24_t init_id; + uint32_t frame_id; + uint24_t padding; + uint40_t serial_no; +}; + +struct 
ConfigurableHeader { + uint16_t packet_type; + uint16_t frame_id; + uint24_t init_id; + uint40_t serial_no; +}; +#pragma pack(pop) + +void packet_writer::set_init_id(uint8_t* lidar_buf, uint32_t init_id) const { + if (udp_profile_lidar == UDPProfileLidar::PROFILE_LIDAR_LEGACY) { + // LEGACY profile has no init_id + return; + } + if (udp_profile_lidar == + UDPProfileLidar::PROFILE_FUSA_RNG15_RFL8_NIR8_DUAL) { + auto hdr = (FUSAHeader*)lidar_buf; + hdr->init_id = init_id; + } else { + auto hdr = (ConfigurableHeader*)lidar_buf; + hdr->init_id = init_id; + } +} + +void packet_writer::set_prod_sn(uint8_t* lidar_buf, uint64_t sn) const { + if (udp_profile_lidar == UDPProfileLidar::PROFILE_LIDAR_LEGACY) { + // LEGACY profile has no prod_sn + return; + } + if (udp_profile_lidar == + UDPProfileLidar::PROFILE_FUSA_RNG15_RFL8_NIR8_DUAL) { + auto hdr = (FUSAHeader*)lidar_buf; + hdr->serial_no = sn; + } else { + auto hdr = (ConfigurableHeader*)lidar_buf; + hdr->serial_no = sn; + } +} + template void packet_writer::set_px(uint8_t* px_buf, ChanField i, T value) const { const auto& f = impl_->fields.at(i); diff --git a/ouster_client/src/sensor_http.cpp b/ouster_client/src/sensor_http.cpp index 56e4172a..9691e798 100644 --- a/ouster_client/src/sensor_http.cpp +++ b/ouster_client/src/sensor_http.cpp @@ -1,12 +1,9 @@ #include "ouster/sensor_http.h" -#include - #include "curl_client.h" #include "sensor_http_imp.h" #include "sensor_tcp_imp.h" -using std::stoul; using std::string; using namespace ouster::util; @@ -23,19 +20,7 @@ string SensorHttp::firmware_version_string(const string& hostname, version SensorHttp::firmware_version(const string& hostname, int timeout_sec) { auto result = firmware_version_string(hostname, timeout_sec); - auto rgx = std::regex(R"(v(\d+).(\d+)\.(\d+))"); - std::smatch matches; - std::regex_search(result, matches, rgx); - - if (matches.size() < 4) return invalid_version; - - try { - return version{static_cast(stoul(matches[1])), - static_cast(stoul(matches[2])), - static_cast(stoul(matches[3]))}; - } catch (const std::exception&) { - return invalid_version; - } + return ouster::util::version_from_string(result); } std::unique_ptr SensorHttp::create(const string& hostname, diff --git a/ouster_client/src/sensor_http_imp.h b/ouster_client/src/sensor_http_imp.h index dc5ae06b..578ef70d 100644 --- a/ouster_client/src/sensor_http_imp.h +++ b/ouster_client/src/sensor_http_imp.h @@ -24,7 +24,8 @@ class SensorHttpImp : public util::SensorHttp { /** * Constructs an http interface to communicate with the sensor. * - * @param[in] hostname hostname of the sensor to communicate with. + * @param[in] hostname Hostname of the sensor to communicate with. + * @param[in] timeout_sec The timeout to use in seconds. */ SensorHttpImp(const std::string& hostname, int timeout_sec); @@ -144,6 +145,7 @@ class SensorHttpImp_2_1 : public SensorHttpImp_2_2 { * Constructs an http interface to communicate with the sensor. * * @param[in] hostname hostname of the sensor to communicate with. + * @param[in] timeout_sec The timeout to use in seconds. 
*/ SensorHttpImp_2_1(const std::string& hostname, int timeout_sec); diff --git a/ouster_client/src/sensor_info.cpp b/ouster_client/src/sensor_info.cpp index 3fd8a8b0..8d3b8eff 100644 --- a/ouster_client/src/sensor_info.cpp +++ b/ouster_client/src/sensor_info.cpp @@ -685,7 +685,7 @@ Json::Value info_to_nested_json(const sensor_info& info) { } // TODO refactor for performance since we're parsing -std::string sensor_info::updated_metadata_string() { +std::string sensor_info::updated_metadata_string() const { Json::StreamWriterBuilder builder; builder["enableYAMLCompatibility"] = true; builder["precision"] = 6; @@ -716,7 +716,9 @@ std::string sensor_info::updated_metadata_string() { logger().info( "Outputting updated metadata string based on non-legacy format " "of original metadata"); - if (this->fw_rev.substr(0, 2) == "v1") { + using namespace ouster::util; + auto fw_version = ouster::util::version_from_string(fw_rev); + if (fw_version != invalid_version && fw_version.major == 1) { // NOTE: currently updated_metatadata_string does not handle // outputting udp_dest and operating_mode back into udp_ip and // auto_start_flag for FW 1.12, 1.13, 1.14 in the config_params @@ -994,5 +996,46 @@ sensor_info parse_metadata(const std::string& metadata, return sensor_info(metadata, skip_beam_validation); } +// TODO: do we need to expose this method? +std::string get_firmware_version(const Json::Value& metadata_root) { + auto fw_ver = std::string{}; + if (metadata_root["sensor_info"].isObject()) { + if (metadata_root["sensor_info"].isMember("semver")) { + // This is only true for 3.2 and later + fw_ver = metadata_root["sensor_info"]["semver"].asString(); + } else if (metadata_root["sensor_info"].isMember("build_rev")) { + // fall back to build_rev + fw_ver = metadata_root["sensor_info"]["build_rev"].asString(); + } + } + return fw_ver; +} + +ouster::util::version firmware_version_from_metadata( + const std::string& metadata) { + if (metadata.empty()) { + throw std::invalid_argument( + "firmware_version_from_metadata metadata empty!"); + } + + Json::Value root{}; + Json::CharReaderBuilder builder{}; + std::string errors{}; + std::stringstream ss{metadata}; + + if (!Json::parseFromStream(builder, ss, &root, &errors)) + throw std::runtime_error{ + "Errors parsing metadata for parse_metadata: " + errors}; + + auto fw_ver = get_firmware_version(root); + if (fw_ver.empty()) { + throw std::runtime_error( + "firmware_version_from_metadata failed to deduce version info from " + "metadata!"); + } + + return ouster::util::version_from_string(fw_ver); +} + } // namespace sensor } // namespace ouster diff --git a/ouster_client/src/sensor_tcp_imp.h b/ouster_client/src/sensor_tcp_imp.h index c2b2afa6..c70d375f 100644 --- a/ouster_client/src/sensor_tcp_imp.h +++ b/ouster_client/src/sensor_tcp_imp.h @@ -9,7 +9,7 @@ #pragma once -#include "netcompat.h" +#include "ouster/impl/netcompat.h" #include "ouster/sensor_http.h" namespace ouster { diff --git a/ouster_client/src/types.cpp b/ouster_client/src/types.cpp index 5c088afe..2aa5a5c7 100644 --- a/ouster_client/src/types.cpp +++ b/ouster_client/src/types.cpp @@ -12,6 +12,7 @@ #include #include #include +#include #include #include #include @@ -28,6 +29,7 @@ namespace ouster { using nonstd::make_optional; using nonstd::nullopt; using nonstd::optional; +using std::stoul; namespace sensor { @@ -118,6 +120,19 @@ Table udp_profile_imu_strings{{ {PROFILE_IMU_LEGACY, "LEGACY"}, }}; +Table full_scale_range_strings{{ + {FSR_NORMAL, "NORMAL"}, + {FSR_EXTENDED, "EXTENDED"}, +}}; + 
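+// These string tables back both the to_string() overloads and the
+// *_of_string() parsers, e.g. (sketch):
+// return_order_of_string("NEAREST_TO_FARTHEST") yields
+// ORDER_NEAREST_TO_FARTHEST, and to_string() maps it back to the same string.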
+Table return_order_strings{{ + {ORDER_STRONGEST_TO_WEAKEST, "STRONGEST_TO_WEAKEST"}, + {ORDER_FARTHEST_TO_NEAREST, "FARTHEST_TO_NEAREST"}, + {ORDER_NEAREST_TO_FARTHEST, "NEAREST_TO_FARTHEST"}, + {ORDER_DEPRECATED_STRONGEST_RETURN_FIRST, "STRONGEST_RETURN_FIRST"}, + {ORDER_DEPRECATED_LAST_RETURN_FIRST, "LAST_RETURN_FIRST"}, +}}; + // TODO: should we name them something better? feel like the most important is // SHOT_LIMITING_NORMAL Table shot_limiting_status_strings{{ @@ -188,7 +203,11 @@ bool operator==(const sensor_config& lhs, const sensor_config& rhs) { lhs.phase_lock_offset == rhs.phase_lock_offset && lhs.columns_per_packet == rhs.columns_per_packet && lhs.udp_profile_lidar == rhs.udp_profile_lidar && - lhs.udp_profile_imu == rhs.udp_profile_imu); + lhs.udp_profile_imu == rhs.udp_profile_imu && + lhs.gyro_fsr == rhs.gyro_fsr && + lhs.accel_fsr == rhs.accel_fsr && + lhs.return_order == rhs.return_order && + lhs.min_range_threshold_cm == rhs.min_range_threshold_cm); } bool operator!=(const sensor_config& lhs, const sensor_config& rhs) { @@ -372,6 +391,14 @@ optional polarity_of_string(const std::string& s) { return rlookup(impl::polarity_strings, s.c_str()); } +optional full_scale_range_of_string(const std::string& s) { + return rlookup(impl::full_scale_range_strings, s.c_str()); +} + +optional return_order_of_string(const std::string& s) { + return rlookup(impl::return_order_strings, s.c_str()); +} + std::string to_string(NMEABaudRate rate) { auto res = lookup(impl::nmea_baud_rate_strings, rate); return res ? res.value() : "UNKNOWN"; @@ -453,6 +480,18 @@ std::string to_string(ThermalShutdownStatus thermal_shutdown_status) { return res ? res.value() : "UNKNOWN"; } +std::string to_string(ReturnOrder return_order) { + auto res = + lookup(impl::return_order_strings, return_order); + return res ? res.value() : "UNKNOWN"; +} + +std::string to_string(FullScaleRange full_scale_range) { + auto res = + lookup(impl::full_scale_range_strings, full_scale_range); + return res ? 
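
The new `ReturnOrder` and `FullScaleRange` enums follow the same string round-trip convention as the existing config enums: a `to_string` overload plus an `*_of_string` lookup that yields an empty optional for unknown values. A small sketch:

    #include <iostream>
    #include "ouster/types.h"

    void round_trip_examples() {
        using namespace ouster::sensor;

        auto order = return_order_of_string("STRONGEST_TO_WEAKEST");
        if (order) std::cout << to_string(*order) << "\n";  // STRONGEST_TO_WEAKEST

        auto fsr = full_scale_range_of_string("EXTENDED");
        if (fsr) std::cout << to_string(*fsr) << "\n";      // EXTENDED

        // unknown strings yield an empty optional instead of throwing
        if (!full_scale_range_of_string("WIDE"))
            std::cout << "unrecognized full scale range\n";
    }
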
res.value() : "UNKNOWN"; +} + void check_signal_multiplier(const double signal_multiplier) { std::string signal_multiplier_error = "Provided signal multiplier is invalid: " + @@ -697,6 +736,40 @@ sensor_config parse_config(const Json::Value& root) { config.udp_profile_imu = udp_profile_imu_of_string(root["udp_profile_imu"].asString()); + // Firmware 3.1 and higher options + if (!root["gyro_fsr"].empty()) { + auto gyro_fsr = + full_scale_range_of_string(root["gyro_fsr"].asString()); + if (gyro_fsr) { + config.gyro_fsr = gyro_fsr; + } else { + throw std::runtime_error{"Unexpected Gyro FSR"}; + } + } + + if (!root["accel_fsr"].empty()) { + auto accel_fsr = + full_scale_range_of_string(root["accel_fsr"].asString()); + if (accel_fsr) { + config.accel_fsr = accel_fsr; + } else { + throw std::runtime_error{"Unexpected Accel FSR"}; + } + } + + if (!root["return_order"].empty()) { + auto return_order = + return_order_of_string(root["return_order"].asString()); + if (return_order) { + config.return_order = return_order; + } else { + throw std::runtime_error{"Unexpected Return Order"}; + } + } + + if (!root["min_range_threshold_cm"].empty()) + config.min_range_threshold_cm = root["min_range_threshold_cm"].asInt(); + return config; } @@ -812,6 +885,19 @@ Json::Value config_to_json(const sensor_config& config) { if (config.udp_profile_imu) root["udp_profile_imu"] = to_string(config.udp_profile_imu.value()); + // Firmware 3.1 and higher options + if (config.gyro_fsr) + root["gyro_fsr"] = to_string(config.gyro_fsr.value()); + + if (config.accel_fsr) + root["accel_fsr"] = to_string(config.accel_fsr.value()); + + if (config.min_range_threshold_cm) + root["min_range_threshold_cm"] = config.min_range_threshold_cm.value(); + + if (config.return_order) + root["return_order"] = to_string(config.return_order.value()); + return root; } @@ -852,5 +938,21 @@ version version_of_string(const std::string& s) { return invalid_version; } +version version_from_string(const std::string& v) { + auto rgx = std::regex(R"(v?(\d+).(\d+)\.(\d+))"); + std::smatch matches; + std::regex_search(v, matches, rgx); + + if (matches.size() < 4) return invalid_version; + + try { + return version{static_cast(stoul(matches[1])), + static_cast(stoul(matches[2])), + static_cast(stoul(matches[3]))}; + } catch (const std::exception&) { + return invalid_version; + } +} + } // namespace util } // namespace ouster diff --git a/ouster_client/src/udp_packet_source.cpp b/ouster_client/src/udp_packet_source.cpp new file mode 100644 index 00000000..f07d6178 --- /dev/null +++ b/ouster_client/src/udp_packet_source.cpp @@ -0,0 +1,326 @@ +/** + * Copyright (c) 2021, Ouster, Inc. + * All rights reserved. 
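
`parse_config`/`config_to_json` above only round-trip the new fields when they are present, so they remain optional members of `sensor_config`. A sketch of requesting them from a sensor; `set_config` is the existing client call, and both its exact signature here and firmware 3.1+ support on the target sensor are assumptions:

    #include <string>
    #include "ouster/client.h"
    #include "ouster/types.h"

    bool apply_fw31_options(const std::string& hostname) {
        ouster::sensor::sensor_config config;
        config.gyro_fsr = ouster::sensor::FSR_EXTENDED;
        config.accel_fsr = ouster::sensor::FSR_NORMAL;
        config.return_order = ouster::sensor::ORDER_STRONGEST_TO_WEAKEST;
        config.min_range_threshold_cm = 30;

        // older firmwares will reject the unknown parameters
        return ouster::sensor::set_config(hostname, config);
    }
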
+ */ + +#include "ouster/udp_packet_source.h" + +#include +#include +#include +#include +#include + +#include "logging.h" +#include "ouster/client.h" +#include "ouster/impl/client_poller.h" +#include "ouster/types.h" + +namespace ouster { +namespace sensor { +namespace impl { + +std::string to_string(client_state st) { + switch (static_cast(st)) { + case client_state::TIMEOUT: + return "TIMEOUT"; + case client_state::CLIENT_ERROR: + return "CLIENT_ERROR"; + case client_state::LIDAR_DATA: + return "LIDAR_DATA"; + case client_state::IMU_DATA: + return "IMU_DATA"; + case client_state::EXIT: + return "EXIT"; + case Producer::CLIENT_OVERFLOW: + return "OVERFLOW"; + default: + return "UNKNOWN_EVENT"; + } +} + +std::string to_string(Event e) { + return std::string("{") + std::to_string(e.source) + ", " + + to_string(e.state) + "}"; +} + +int Producer::add_client(std::shared_ptr cli, size_t lidar_buf_size, + size_t lidar_packet_size, size_t imu_buf_size, + size_t imu_packet_size) { + std::unique_lock lock{mtx_, std::defer_lock}; + if (!lock.try_lock()) + throw std::runtime_error("add_client called on a running producer"); + + if (!cli) throw std::runtime_error("add_client called with nullptr"); + + int id = clients_.size(); + clients_.push_back(cli); + rb_->allocate({id, client_state::LIDAR_DATA}, lidar_buf_size, + Packet(lidar_packet_size)); + rb_->allocate({id, client_state::IMU_DATA}, imu_buf_size, + Packet(imu_packet_size)); + return id; +} + +int Producer::add_client(std::shared_ptr cli, const sensor_info& info, + float seconds_to_buffer) { + const data_format& df = info.format; + uint32_t packets_per_frame = df.columns_per_frame / df.columns_per_packet; + float lidar_hz = static_cast(packets_per_frame) * df.fps; + float imu_hz = 100.f; + const packet_format& pf = get_format(info); + return add_client(cli, static_cast(lidar_hz * seconds_to_buffer), + pf.lidar_packet_size, + static_cast(imu_hz * seconds_to_buffer), + pf.imu_packet_size); +} + +std::shared_ptr Producer::subscribe( + std::shared_ptr pub) { + std::unique_lock lock{mtx_, std::defer_lock}; + if (!lock.try_lock()) + throw std::runtime_error("subscribe called on a running producer"); + + pubs_.push_back(pub); + return std::make_shared(pub->queue(), rb_); +} + +std::shared_ptr Producer::subscribe(EventSet events) { + auto pub = std::make_shared(events); + return subscribe(pub); +} + +bool Producer::_verify() const { + if (clients_.size() == 0) { + logger().error("Producer started with no clients"); + return false; + } + + if (pubs_.size() == 0) { + logger().error("Producer started with no publishers"); + return false; + } + + bool out = true; + + Event last_chk; + + auto n_pubs_accept = [this, &last_chk](Event e) { + last_chk = e; + auto l = [e](int total, auto& pub) { + return total + static_cast(pub->accepts(e)); + }; + return std::accumulate(pubs_.begin(), pubs_.end(), 0, l); + }; + + if (n_pubs_accept({-1, client_state::CLIENT_ERROR}) == 0) { + logger().error("Producer: none of the publishers accept {}", + to_string(last_chk)); + out = false; + } + + if (n_pubs_accept({-1, client_state::EXIT}) == 0) { + logger().error("Producer: none of the publishers accept {}", + to_string(last_chk)); + out = false; + } + + for (int i = 0, end = clients_.size(); i < end; ++i) { + if (n_pubs_accept({i, client_state::LIDAR_DATA}) != 1) { + logger().error( + "Producer: {} publishers accept {}, needs to be exactly one", + n_pubs_accept(last_chk), to_string(last_chk)); + out = false; + } + + if (n_pubs_accept({i, client_state::IMU_DATA}) != 1) { + 
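
For reference, the `sensor_info` overload of `add_client` above sizes the ring buffers from the implied packet rates; a worked example for a 1024x10 mode with 16 columns per packet (illustrative numbers, not taken from the patch):

    // packets_per_frame = columns_per_frame / columns_per_packet = 1024 / 16 = 64
    // lidar_hz          = packets_per_frame * fps                = 64 * 10   = 640
    // imu_hz            = 100 (fixed)
    // with seconds_to_buffer = 1.5f:
    //   lidar ring buffer = 640 * 1.5 = 960 packet slots
    //   imu ring buffer   = 100 * 1.5 = 150 packet slots
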
logger().error( + "Producer: {} publishers accept {}, needs to be exactly one", + n_pubs_accept(last_chk), to_string(last_chk)); + out = false; + } + + if (n_pubs_accept({i, client_state(Producer::CLIENT_OVERFLOW)}) == 0) { + logger().error("Producer: no publishers accept {}", + to_string(last_chk)); + } + } + return out; +} + +static bool read_packet(const client& cli, Packet& packet, client_state st) { + switch (st) { + case client_state::LIDAR_DATA: + return read_lidar_packet(cli, packet.as()); + case client_state::IMU_DATA: + return read_imu_packet(cli, packet.as()); + default: + return false; + } +} + +static client_state operator&(client_state a, client_state b) { + int a_i = static_cast(a); + int b_i = static_cast(b); + return static_cast(a_i & b_i); +} + +void Producer::run() { + // check publisher/client consistency + if (!_verify()) return; + + std::vector overflows(clients_.size(), false); + + // this could be a private virtual instead + auto handle_event = [this, &overflows](Event e) { + const client_state overflow = client_state(Producer::CLIENT_OVERFLOW); + switch (e.state) { + case 0: + break; + case client_state::CLIENT_ERROR: + case client_state::EXIT: + for (auto& pub : pubs_) pub->publish(e); + break; + case client_state::LIDAR_DATA: + case client_state::IMU_DATA: + if (rb_->full(e)) { + if (!overflows[e.source]) { + overflows[e.source] = true; + for (auto& pub : pubs_) { + // publish with priority + pub->publish({e.source, overflow}, true); + } + } + } else if (read_packet(*clients_[e.source], rb_->back(e), + e.state)) { + rb_->push(e); + for (auto& pub : pubs_) pub->publish(e); + overflows[e.source] = false; + } + break; + default: + break; + } + }; + + std::lock_guard lock{mtx_}; + + std::shared_ptr poller = make_poller(); + while (!stop_) { + reset_poll(*poller); + + for (auto& cli : clients_) set_poll(*poller, *cli); + + int res = poll(*poller); + + if (res == 0) { // TIMEOUT + continue; + } else if (res < 0) { // CLIENT_ERROR / EXIT + client_state st = get_error(*poller); + handle_event({-1, st & client_state::CLIENT_ERROR}); + handle_event({-1, st & client_state::EXIT}); + break; + } else { + for (int i = 0, end = clients_.size(); i < end; ++i) { + client_state st = get_poll(*poller, *clients_[i]); + handle_event({i, st & client_state::LIDAR_DATA}); + handle_event({i, st & client_state::IMU_DATA}); + } + } + } +} + +/* + * Producer will release mtx_ only when it exits the loop. 
+ */ +void Producer::shutdown() { + stop_ = true; + for (auto& pub : pubs_) pub->publish({-1, client_state::EXIT}); + + std::lock_guard lock{mtx_}; + // close UDP sockets when any producer has exited + clients_.clear(); + pubs_.clear(); + rb_.reset(new RingBufferMap()); + stop_ = false; +} + +UDPPacketSource::UDPPacketSource() + : Producer(), + Subscriber(std::move(*Producer::subscribe( + {{-1, client_state::CLIENT_ERROR}, {-1, client_state::EXIT}}))) {} + +void UDPPacketSource::_accept_client_events(int id) { + pubs_[0]->set_accept({id, client_state::LIDAR_DATA}); + pubs_[0]->set_accept({id, client_state::IMU_DATA}); + pubs_[0]->set_accept({id, client_state(Producer::CLIENT_OVERFLOW)}); +} + +void UDPPacketSource::add_client(std::shared_ptr cli, + size_t lidar_buf_size, + size_t lidar_packet_size, size_t imu_buf_size, + size_t imu_packet_size) { + _accept_client_events(Producer::add_client( + cli, lidar_buf_size, lidar_packet_size, imu_buf_size, imu_packet_size)); +} +void UDPPacketSource::add_client(std::shared_ptr cli, + const sensor_info& info, + float seconds_to_buffer) { + _accept_client_events(Producer::add_client(cli, info, seconds_to_buffer)); +} + +BufferedUDPSource::BufferedUDPSource() + : Producer(), + Subscriber(std::move(*Producer::subscribe( + {{-1, client_state::CLIENT_ERROR}, + {-1, client_state::EXIT}, + {0, client_state::LIDAR_DATA}, + {0, client_state::IMU_DATA}, + {0, client_state(Producer::CLIENT_OVERFLOW)}}))) {} + +BufferedUDPSource::BufferedUDPSource(std::shared_ptr client, + size_t lidar_buf_size, + size_t lidar_packet_size, + size_t imu_buf_size, + size_t imu_packet_size) + : BufferedUDPSource() { + Producer::add_client(client, lidar_buf_size, lidar_packet_size, + imu_buf_size, imu_packet_size); +} + +BufferedUDPSource::BufferedUDPSource(std::shared_ptr client, + const sensor_info& info, + float seconds_to_buffer) + : BufferedUDPSource() { + Producer::add_client(client, info, seconds_to_buffer); +} + +client_state BufferedUDPSource::consume(LidarPacket& lidarp, ImuPacket& imup, + float timeout_sec) { + Event e = Subscriber::pop(timeout_sec); + client_state st = e.state; + + // return early without advancing queue + if (!Subscriber::_has_packet(e)) return st; + + Packet& p = Subscriber::packet(e); + + auto write_packet = [&p](auto& packet) { + auto sz = std::min(packet.buf.size(), p.buf.size()); + std::memcpy(packet.buf.data(), p.buf.data(), sz); + packet.host_timestamp = p.host_timestamp; + }; + + if (st & client_state::LIDAR_DATA) { + write_packet(lidarp); + } else if (st & client_state::IMU_DATA) { + write_packet(imup); + } + + Subscriber::advance(e); + return st; +} + +} // namespace impl +} // namespace sensor +} // namespace ouster diff --git a/ouster_osf/CMakeLists.txt b/ouster_osf/CMakeLists.txt index 768c082b..ae07a49d 100644 --- a/ouster_osf/CMakeLists.txt +++ b/ouster_osf/CMakeLists.txt @@ -12,6 +12,7 @@ find_package(PNG REQUIRED) find_package(Eigen3 REQUIRED) find_package(jsoncpp REQUIRED) find_package(spdlog REQUIRED) +include(Coverage) # TODO: Extract to a separate FindFlatbuffers cmake file # Flatbuffers flatc resolution and different search name 'flatbuffers` with Conan @@ -91,12 +92,10 @@ add_library(ouster_osf STATIC src/compat_ops.cpp src/basics.cpp src/crc32.cpp src/metadata.cpp - src/writer.cpp src/meta_lidar_sensor.cpp src/meta_extrinsics.cpp src/meta_streaming_info.cpp src/stream_lidar_scan.cpp - src/layout_standard.cpp src/layout_streaming.cpp src/file.cpp src/reader.cpp @@ -104,8 +103,11 @@ add_library(ouster_osf STATIC src/compat_ops.cpp 
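
The changelog's new `BufferedUDPSource` can be driven end to end: one thread runs the producer loop while the caller drains packets with `consume()`. A hedged sketch; the packet construction, the `init_client`/`get_metadata` calls, and the public visibility of `run()`/`shutdown()` inherited from `Producer` are assumptions about the surrounding SDK rather than guarantees of this patch:

    #include <string>
    #include <thread>
    #include "ouster/client.h"
    #include "ouster/udp_packet_source.h"

    void capture(const std::string& hostname) {
        namespace sensor = ouster::sensor;

        auto cli = sensor::init_client(hostname, 7502, 7503);
        if (!cli) return;

        auto info = sensor::parse_metadata(sensor::get_metadata(*cli));
        auto pf = sensor::get_format(info);

        // buffer roughly one second of lidar + imu packets
        sensor::impl::BufferedUDPSource source(cli, info, 1.0f);
        std::thread producer([&source] { source.run(); });

        sensor::LidarPacket lidar_packet(pf.lidar_packet_size);
        sensor::ImuPacket imu_packet(pf.imu_packet_size);

        for (int i = 0; i < 640; ++i) {
            auto st = source.consume(lidar_packet, imu_packet, 1.0f);
            if (st & sensor::client_state::CLIENT_ERROR) break;
            if (st & sensor::client_state::EXIT) break;
            if (st & sensor::client_state::LIDAR_DATA) {
                // lidar_packet.buf / lidar_packet.host_timestamp were filled
            } else if (st & sensor::client_state::IMU_DATA) {
                // imu_packet was filled instead
            }
            // CLIENT_OVERFLOW means the ring buffer filled and packets dropped
        }

        source.shutdown();  // publishes EXIT and releases the producer loop
        producer.join();
    }
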
src/json_utils.cpp src/fb_utils.cpp src/pcap_source.cpp + src/writer.cpp ) +CodeCoverageFunctionality(ouster_osf) + if (OUSTER_OSF_NO_MMAP) target_compile_definitions(ouster_osf PRIVATE OUSTER_OSF_NO_MMAP) endif() diff --git a/ouster_osf/include/ouster/osf/basics.h b/ouster_osf/include/ouster/osf/basics.h index 59db4f09..5899f37d 100644 --- a/ouster_osf/include/ouster/osf/basics.h +++ b/ouster_osf/include/ouster/osf/basics.h @@ -30,32 +30,54 @@ namespace gen { using namespace v2; } +/** + * Enumerator for the OSF Version. This will change whenever the underlying + * flatbuffer structures change. + */ enum OSF_VERSION { - V_INVALID = 0, - V_1_0, // Original version of the OSF (2019/9/16) - V_1_1, // Add gps/imu/car trajectory to the OSF (2019/11/14) - V_1_2, // Change gps_waypoint type to Table in order to - // support Python language generator - V_1_3, // Add extension for Message in osfChunk - // and for Session in osfSession (2020/03/18) - V_1_4, // Gen2/128 support (2020/08/11) - - V_2_0 = 20 // Second Generation OSF v2 + V_INVALID = 0, ///< Invalid OSF Version + V_1_0, ///< Original version of the OSF (2019/9/16) + V_1_1, ///< Add gps/imu/car trajectory to the OSF (2019/11/14) + V_1_2, ///< Change gps_waypoint type to Table in order to + ///< support Python language generator + V_1_3, ///< Add extension for Message in osfChunk + ///< and for Session in osfSession (2020/03/18) + V_1_4, ///< Gen2/128 support (2020/08/11) + + V_2_0 = 20 ///< Second Generation OSF v2 }; -/// Chunking strategies. Refer to RFC0018 for more details. +/** + * Chunking strategies. Refer to RFC0018 for more details. + */ enum ChunksLayout { LAYOUT_STANDARD = 0, ///< not used currently LAYOUT_STREAMING = 1 ///< default layout (the only one for a user) }; +/** + * To String Functionality For ChunksLayout + * + * @param[in] chunks_layout The data to get the string representation format + * @return The string representation + */ std::string to_string(ChunksLayout chunks_layout); + +/** + * From String Conversion Functionality To ChunksLayout + * + * @param[in] s The String Representation of ChunksLayout + * @return The corrosponding ChunksLayout object + */ ChunksLayout chunks_layout_of_string(const std::string& s); // stable common types mapped to ouster::osf using v2::HEADER_STATUS; -/** Common timestamp for all time in ouster::osf */ +/** + * Common timestamp for all time in ouster::osf. + * Nanoseconds were chosen due to the data coming off of the sensor. + */ using ts_t = std::chrono::nanoseconds; /** @@ -64,26 +86,46 @@ using ts_t = std::chrono::nanoseconds; */ static constexpr uint32_t FLATBUFFERS_PREFIX_LENGTH = 4; -/** Return string representation of header */ +/** + * To String Functionality For HEADER_STATUS + * + * @param[in] status The data to get the string representation format + * @return The string representation + */ std::string to_string(const HEADER_STATUS status); -/** Debug method to get hex buf values in string */ +/** + * Debug method to get hex buf values in string + * + * @param[in] buf The buffer to dump to string. + * @param[in] count The size of the buffer. + * @param[in] max_show_count The number of bytes to dump. This arg is optional. + * @return The string representation + */ std::string to_string(const uint8_t* buf, const size_t count, const size_t max_show_count = 0); -/// Open read test file to a string. +/** + * Internal method for reading a file and returning the text + * data. + * + * @param[in] filename The file to read. + * @return The text of the file specified. 
+ */ std::string read_text_file(const std::string& filename); /** * Reads the prefix size of the Flatbuffers buffer. First 4 bytes. - * @param buf pointer to Flatbuffers buffer stared with prefixed size + * + * @param[in] buf Pointer to Flatbuffers buffer stared with prefixed size * @return the size recovered from the stored prefix size */ uint32_t get_prefixed_size(const uint8_t* buf); /** * Calculates the full size of the block (prefixed_size + size + CRC32). - * @param buf pointer to Flatbuffers buffer stared with prefixed size + * + * @param[in] buf Pointer to Flatbuffers buffer stared with prefixed size * @return the calculated size of the block */ uint32_t get_block_size(const uint8_t* buf); @@ -91,70 +133,51 @@ uint32_t get_block_size(const uint8_t* buf); /** * Check the prefixed size buffer CRC32 fields. * - * @param buf is structured as size prefixed Flatbuffer buffer, i.e. first - * 4 bytes is the size of the buffer (excluding 4 bytes of the size), - * and the 4 bytes that follows right after the 4 + - * is the CRC32 bytes. - * @param max_size total number of bytes that can be accessed in the buffer, - * as a safety precaution if buffer is not well formed, or if - * first prefixed size bytes are broken. + * @param[in] buf Structured as size prefixed Flatbuffer buffer, i.e. first + * 4 bytes is the size of the buffer (excluding 4 bytes of the + * size), and the 4 bytes that follows right after the + * 4 + [prefixed_size] is the CRC32 bytes. + * @param[in] max_size Total number of bytes that can be accessed in the buffer, + * as a safety precaution if buffer is not well formed, or + * if first prefixed size bytes are broken. * @return true if CRC field is correct, false otherwise - * */ bool check_prefixed_size_block_crc( const uint8_t* buf, const uint32_t max_size = std::numeric_limits::max()); +/** @defgroup OsfBatchingFunctions Osf Batching Functions. */ + /** * Makes the closure to batch lidar_packets and emit LidarScan object. * Result returned through callback handler(ts, LidarScan). * LidarScan uses user modified field types + * + * @ingroup OsfBatchingFunctions + * + * @param[in] info The sensor metadata to use. + * @param[in] ls_field_types The field types to use. + * @param[in] handler The callback to use on the results. + * @return Closure to batch and emit LidarScan objects. */ -template std::function make_build_ls( const ouster::sensor::sensor_info& info, - const LidarScanFieldTypes& ls_field_types, ResultHandler&& handler) { - const auto w = info.format.columns_per_frame; - const auto h = info.format.pixels_per_column; - - std::shared_ptr ls(nullptr); - if (ls_field_types.empty()) { - auto default_ls_field_types = get_field_types(info); - ls = std::make_shared(w, h, default_ls_field_types.begin(), - default_ls_field_types.end()); - - } else { - ls = std::make_shared(w, h, ls_field_types.begin(), - ls_field_types.end()); - } - - auto pf = ouster::sensor::get_format(info); - auto build_ls_imp = ScanBatcher(w, pf); - osf::ts_t first_msg_ts{-1}; - return [handler, build_ls_imp, ls, first_msg_ts]( - const osf::ts_t msg_ts, const uint8_t* buf) mutable { - if (first_msg_ts == osf::ts_t{-1}) { - first_msg_ts = msg_ts; - } - if (build_ls_imp(buf, *ls)) { - handler(first_msg_ts, *ls); - // At this point we've just started accumulating new LidarScan, so - // we are saving the msg_ts (i.e. 
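
A hedged sketch tying the helpers above together: walking size-prefixed flatbuffer blocks in a chunk region and counting those whose CRC32 checks out. The iteration pattern is illustrative, not a documented API contract:

    #include <cstddef>
    #include <cstdint>
    #include "ouster/osf/basics.h"

    size_t count_valid_blocks(const uint8_t* buf, uint64_t size) {
        size_t valid = 0;
        uint64_t offset = 0;
        while (offset + ouster::osf::FLATBUFFERS_PREFIX_LENGTH < size) {
            const uint8_t* block = buf + offset;
            // prefixed size + 4-byte prefix + 4-byte CRC
            uint32_t block_size = ouster::osf::get_block_size(block);
            if (block_size == 0 || offset + block_size > size) break;
            if (ouster::osf::check_prefixed_size_block_crc(block, size - offset))
                ++valid;
            offset += block_size;
        }
        return valid;
    }
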
timestamp of a UDP packet) - // which contained the first lidar_packet - first_msg_ts = msg_ts; - } - }; -} + const LidarScanFieldTypes& ls_field_types, + std::function handler); /** * The above make_build_ls() function overload. In this function, LidarScan * uses default field types by the profile + * + * @ingroup OsfBatchingFunctions + * + * @param[in] info The sensor metadata to use. + * @param[in] handler The callback to use on the results. + * @return Closure to batch and emit LidarScan objects. */ -template std::function make_build_ls( - const ouster::sensor::sensor_info& info, ResultHandler&& handler) { - return make_build_ls(info, {}, handler); -} + const ouster::sensor::sensor_info& info, + std::function handler); } // namespace osf } // namespace ouster diff --git a/ouster_osf/include/ouster/osf/crc32.h b/ouster_osf/include/ouster/osf/crc32.h index d4efd575..45367b03 100644 --- a/ouster_osf/include/ouster/osf/crc32.h +++ b/ouster_osf/include/ouster/osf/crc32.h @@ -15,13 +15,20 @@ namespace ouster { namespace osf { -/// Size of the CRC field in a buffer +/** + * Size of the CRC field in a buffer + */ const uint32_t CRC_BYTES_SIZE = 4; +/** @defgroup OsfCRCFunctions Osf CRC Functions. */ + /** * Caclulate CRC value for the buffer of given size. (ZLIB version) - * @param buf pointer to the data buffer - * @param size size of the buffer in bytes + * + * @ingroup OsfCRCFunctions + * + * @param[in] buf Pointer to the data buffer. + * @param[in] size Size of the buffer in bytes. * @return CRC32 value */ uint32_t crc32(const uint8_t* buf, uint32_t size); @@ -29,12 +36,15 @@ uint32_t crc32(const uint8_t* buf, uint32_t size); /** * Caclulate and append CRC value for the buffer of given size and append * it to the initial crc value. (ZLIB version) - * @param initial_crc initial crc value to append to - * @param buf pointer to the data buffer - * @param size size of the buffer in bytes + * + * @ingroup OsfCRCFunctions + * + * @param[in] initial_crc Initial crc value to append to. + * @param[in] buf Pointer to the data buffer. + * @param[in] size Size of the buffer in bytes. * @return CRC32 value */ uint32_t crc32(uint32_t initial_crc, const uint8_t* buf, uint32_t size); } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/include/ouster/osf/file.h b/ouster_osf/include/ouster/osf/file.h index eaa0cd6e..67b112e7 100644 --- a/ouster_osf/include/ouster/osf/file.h +++ b/ouster_osf/include/ouster/osf/file.h @@ -16,12 +16,25 @@ namespace ouster { namespace osf { -enum class OpenMode : uint8_t { READ = 0, WRITE = 1 }; +/** + * Enum representing the available file opening modes. + */ +enum class OpenMode : uint8_t { + READ = 0, ///< Open the file in read-only mode. + WRITE = 1 ///< Open the file in write-only mode. (CURRENTLY NOT SUPPORTED) +}; -/** State of %OSF file */ -enum class FileState : uint8_t { GOOD = 0, BAD = 1 }; +/** + * Enum representing the state of the %OSF file. + */ +enum class FileState : uint8_t { + GOOD = 0, ///< The file is good. + BAD = 1 ///< There is something wrong with the file. +}; -/** Chunk buffer type to store raw byte buffers of data. */ +/** + * Chunk buffer type to store raw byte buffers of data. + */ using ChunkBuffer = std::vector; /** @@ -30,48 +43,121 @@ using ChunkBuffer = std::vector; */ class OsfFile { public: + /** + * Default constructor, sets most data to nullptr and 0. + */ explicit OsfFile(); /** - * Opens the file. + * Opens the OSF file. 
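
With the template above replaced by concrete overloads, `make_build_ls` can be fed a plain lambda. The handler and closure parameter types in this sketch follow the removed template implementation (the handler receives the frame timestamp and the finished `LidarScan`; the closure receives a packet timestamp and buffer) and are assumptions about the new `std::function` signatures:

    #include <cstdint>
    #include <utility>
    #include <vector>
    #include "ouster/lidar_scan.h"
    #include "ouster/osf/basics.h"
    #include "ouster/types.h"

    void batch_packets(
        const ouster::sensor::sensor_info& info,
        const std::vector<std::pair<ouster::osf::ts_t, std::vector<uint8_t>>>&
            packets) {
        auto build_ls = ouster::osf::make_build_ls(
            info, [](const ouster::osf::ts_t ts, const ouster::LidarScan& ls) {
                // invoked once per completed frame
                (void)ts;
                (void)ls;
            });

        for (const auto& p : packets) build_ls(p.first, p.second.data());
    }
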
* @note Only OpenMode::READ is supported + * + * @param[in] filename The OSF file to open + * @param[in] mode The mode to open the file in, this argument is optional. */ explicit OsfFile(const std::string& filename, OpenMode mode = OpenMode::READ); + + /** + * Cleans up any filebuffers/memory mapping. + */ ~OsfFile(); // Header Info - uint64_t size() const { return size_; }; - std::string filename() const { return filename_; } + + /** + * Returns the size of the OSF file. + * + * @return The size of the OSF file in bytes. + */ + uint64_t size() const; + + /** + * Returns the filename of the open OSF file. + * + * @return The filename of the open OSF file. + */ + std::string filename() const; + + /** + * Returns the version of the OSF file. + * + * @return The version of the OSF file. + */ OSF_VERSION version(); + + /** + * Returns the offset in the OSF file where the + * metadata section is located. + * + * @throws std::logic_error Exception on bad osf file. + * + * @return Offset to the metadata in bytes + */ uint64_t metadata_offset(); + + /** + * Returns the offset in the OSF file where the + * chunk section is located. + * + * @throws std::logic_error Exception on bad osf file. + * + * @return Offset to the chunks in bytes + */ uint64_t chunks_offset(); - /** Checks the validity of header and session/file_info blocks. */ + /** + * Checks the validity of header and session/file_info blocks. + * + * @return If the header, session, and file_info blocks are valid. + */ bool valid(); /** - * Get the goodness of the file. + * Return the status of the OSF file. * @todo Need to have more states here (eod, valid, error, etc) + * + * @return If the OSF file is good or not. */ - bool good() const { return state_ == FileState::GOOD; } + bool good() const; // Convenience operators - bool operator!() const { return !good(); }; - explicit operator bool() const { return good(); }; + /** + * Return the negated status of the OSF file. + * + * @relates good + * + * @return If the OSF file is good or not, negated. + */ + bool operator!() const; /** - * Sequential access to the file. - * This is mimicking the regular file access with the offset + * Return the status of the OSF file. + * + * @relates good + * + * @return If the OSF file is good or not. */ - uint64_t offset() const { return offset_; } + explicit operator bool() const; + + /** + * Get the current offset in the OSF file. + * + * @return The current offset in the OSF file. + */ + uint64_t offset() const; /** * File seek (in mmap mode it's just moving the offset_ pointer * without any file system opeations.) - * @param pos position in the file + * + * @throws std::logic_error Exception on bad osf file. + * @throws std::out_of_range Exception on out of range seek. + * + * @param[in] pos position in the file + * @return A reference to `this` object. */ - OsfFile& seek(const uint64_t pos); + OsfFile& seek(uint64_t pos); /** * Read from file (in current mmap mode it's copying data from @@ -79,14 +165,34 @@ class OsfFile { * * @todo Handle errors in future and get the way to read them back * with FileState etc. + * + * @throws std::logic_error Exception on bad osf file. + * @throws std::out_of_range Exception on out of range read. + * + * @param[out] buf The buffer to write to. + * @param[in] count The number of bytes to write to buf. + * @return A reference to `this` object. */ OsfFile& read(uint8_t* buf, const uint64_t count); + /** + * Returns whether the OSF file is memory mapped or not. + * + * @return Is the OSF file memory mapped or not. 
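
The accessors documented above make a quick header dump straightforward; a minimal sketch, with error handling beyond the validity check omitted:

    #include <iostream>
    #include <string>
    #include "ouster/osf/file.h"

    void describe_osf(const std::string& path) {
        ouster::osf::OsfFile file(path);  // OpenMode::READ is the default
        if (!file) {
            std::cerr << "not a valid OSF file: " << path << "\n";
            return;
        }
        std::cout << file.filename() << ": " << file.size() << " bytes, "
                  << "metadata offset " << file.metadata_offset()
                  << ", chunks offset " << file.chunks_offset() << "\n";
    }
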
+ */ bool is_memory_mapped() const; /** * Mmap access to the file content with the specified offset from the * beginning of the file. + * + * @throws std::logic_error Exception on bad osf file. + * @throws std::logic_error Exception not being memory mapped. + * @throws std::out_of_range Exception on out of range read. + * + * @param[in] offset The specified offset to access into the OSF file, this + * argument is optional. + * @return The pointer to the OSF file. */ const uint8_t* buf(const uint64_t offset = 0) const; @@ -99,57 +205,120 @@ class OsfFile { */ void close(); - /** Debug helper method to dump OsfFile state to a string. */ + /** + * Debug helper method to dump OsfFile state to a string. + * + * @return The string representation of OsfFile + */ std::string to_string(); - // Copy policy - // Don't allow the copying of the file handler + /** + * Copy policy: + * Don't allow the copying of the file handler + */ OsfFile(const OsfFile&) = delete; + + /** + * @copydoc OsfFile::OsfFile(const OsfFile&) + */ OsfFile& operator=(const OsfFile&) = delete; - // Move policy - // But it's ok to move with the ownership transfer of the underlying file - // handler (mmap). + /** + * Move policy: + * Allow transferring ownership of the underlying file + * handler (mmap). + */ OsfFile(OsfFile&& other); + + /** + * @copydoc OsfFile::OsfFile(OsfFile&& other) + */ OsfFile& operator=(OsfFile&& other); + /** + * Read chunk specified at offset. + * + * @throws std::out_of_range Exception on out of range read. + * + * @param[in] offset The offset to read the chunk from. + * @return Shared pointer to the chunk. nullptr if osf file is bad + */ std::shared_ptr read_chunk(uint64_t offset); + /** + * Get a pointer to the start of the header chunk. + * + * @return Pointer to the header chunk. nullptr if filestream is bad. + */ uint8_t* get_header_chunk_ptr(); + + /** + * Get a pointer to the start of the header chunk. + * + * @return Pointer to the metadata chunk. nullptr if filestream is bad. + */ uint8_t* get_metadata_chunk_ptr(); private: - // Convenience method to set error and print it's content. - // TODO[pb]: Adding more error states will probably extend the set of this - // function. + /** + * Convenience method to set error and print it's content. + * + * @todo [pb] Adding more error states will probably extend the set of this + * function. + * + * @param[in] msg Message to print + */ void error(const std::string& msg = std::string()); - // Opened filename as it was passed in contructor. + /** + * Opened filename as it was passed in contructor. + */ std::string filename_; - // Current offset to the file. (not used in mmaped implementation) but used - // for copying(reading) blocks of memory from the file to the specified - // memory. + /** + * Current offset to the file. (not used in mmaped implementation) but used + * for copying(reading) blocks of memory from the file to the specified + * memory. + */ uint64_t offset_; - // Size of the opened file in bytes + /** + * Size of the opened file in bytes. + */ uint64_t size_; - // Mmaped memory address pointed to the beginning of the file (byte 0) + /** + * Mmaped memory address pointed to the beginning of the file (byte 0) + */ uint8_t* file_buf_; - // File reading access + /** + * File stream for reading. + */ std::ifstream file_stream_; + + /** + * Pointer for the osf file header chunk. 
+ */ std::shared_ptr header_chunk_; + + /** + * Pointer for the metadata chunk + */ std::shared_ptr metadata_chunk_; - // Last read chunk cached, to save the double read on the sequence of verify - // and then read iterator access (used only when compiled with - // OUSTER_OSF_NO_MMAP, and in mmap version we rely on the OS/kernel caching) + /** + * Last read chunk cached, to save the double read on the sequence of verify + * and then read iterator access (used only when compiled with + * OUSTER_OSF_NO_MMAP, and in mmap version we rely on the OS/kernel caching) + */ std::shared_ptr chunk_cache_; + uint64_t chunk_cache_offset_; - // Internal state + /** + * Internal state of the OSF file. + */ FileState state_; }; diff --git a/ouster_osf/include/ouster/osf/layout_standard.h b/ouster_osf/include/ouster/osf/layout_standard.h deleted file mode 100644 index d1734d0d..00000000 --- a/ouster_osf/include/ouster/osf/layout_standard.h +++ /dev/null @@ -1,47 +0,0 @@ -/** - * Copyright (c) 2021, Ouster, Inc. - * All rights reserved. - * - * @file layout_standard.h - * @brief OSF Standard Layout strategy. - * - */ -#pragma once - -#include "ouster/osf/writer.h" - -namespace ouster { -namespace osf { - -constexpr uint32_t STANDARD_DEFAULT_CHUNK_SIZE = - 5 * 1024 * 1024; // not strict ... - -/** - * Standard Layout chunking strategy - * - * When messages laid out into chunks in the order as they come and not - * exceeding `chunk_size` (if possible). However if a single - * message size is bigger than specified `chunk_size` it's still recorded. - */ -class StandardLayoutCW : public ChunksWriter { - public: - StandardLayoutCW(Writer& writer, - uint32_t chunk_size = STANDARD_DEFAULT_CHUNK_SIZE); - void saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& msg_buf) override; - - void finish() override; - - uint32_t chunk_size() const override { return chunk_size_; } - - private: - void finish_chunk(); - - const uint32_t chunk_size_; - ChunkBuilder chunk_builder_{}; - - Writer& writer_; -}; - -} // namespace osf -} // namespace ouster \ No newline at end of file diff --git a/ouster_osf/include/ouster/osf/layout_streaming.h b/ouster_osf/include/ouster/osf/layout_streaming.h index 2d306b6b..904f6506 100644 --- a/ouster_osf/include/ouster/osf/layout_streaming.h +++ b/ouster_osf/include/ouster/osf/layout_streaming.h @@ -14,6 +14,15 @@ namespace ouster { namespace osf { +/** @defgroup OSFStreamingDefaultSize OSF Streaming Default Size. */ + +/** + * Default Streaming Chunk Size. + * This is used in StreamingLayoutCW + * + * @ingroup OSFStreamingDefaultSize + * @relates StreamingLayoutCW + */ constexpr uint32_t STREAMING_DEFAULT_CHUNK_SIZE = 2 * 1024 * 1024; // not strict ... @@ -32,27 +41,81 @@ constexpr uint32_t STREAMING_DEFAULT_CHUNK_SIZE = */ class StreamingLayoutCW : public ChunksWriter { public: + /** + * @param[in] writer Writer object for use when writing messages + * @param[in] chunk_size The chunk size to use, this arg is optional. + */ StreamingLayoutCW(Writer& writer, uint32_t chunk_size = STREAMING_DEFAULT_CHUNK_SIZE); - void saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& msg_buf) override; + /** + * @copydoc ChunksWriter::save_message + * + * @throws std::logic_error Exception on inconsistent timestamps. 
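
`StreamingLayoutCW` is normally driven internally by the OSF `Writer` rather than constructed by users, but the renamed `save_message()`/`finish()` interface above can be sketched directly. How the `Writer` itself is created under the simplified writer API is left out here on purpose:

    #include <cstdint>
    #include <vector>
    #include "ouster/osf/layout_streaming.h"
    #include "ouster/osf/writer.h"

    void write_single_message(ouster::osf::Writer& writer, uint32_t stream_id,
                              const std::vector<uint8_t>& msg) {
        // default chunk size is STREAMING_DEFAULT_CHUNK_SIZE (2 MiB)
        ouster::osf::StreamingLayoutCW chunks(writer);

        // timestamps must be non-decreasing per stream; otherwise
        // save_message() throws std::logic_error
        chunks.save_message(stream_id, ouster::osf::ts_t{0}, msg);
        chunks.finish();
    }
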
+ */ + void save_message(const uint32_t stream_id, const ts_t ts, + const std::vector& buf) override; + + /** + * @copydoc ChunksWriter::finish + */ void finish() override; - uint32_t chunk_size() const override { return chunk_size_; } + /** + * @copydoc ChunksWriter::chunk_size + */ + uint32_t chunk_size() const override; private: + /** + * Internal method to calculate and append the stats + * for a specific set of new messages. + * + * @param[in] stream_id The stream id to associate with the message. + * @param[in] ts The timestamp for the messages. + * @param[in] msg_buf A vector of message buffers to gather stats about. + */ void stats_message(const uint32_t stream_id, const ts_t ts, const std::vector& msg_buf); + + /** + * Finish out a chunk and write the chunk to the writer. + * + * @param[in] stream_id The stream id finish up. + * @param[in] chunk_builder The chunk builder to use for formulating the + * chunk. + */ void finish_chunk(uint32_t stream_id, const std::shared_ptr& chunk_builder); + /** + * Chunk size to use for writing. + */ const uint32_t chunk_size_; + + /** + * Per stream_id chunk builders. + * Map Format: + */ std::map> chunk_builders_{}; + + /** + * Vector pairs for chunk info/stream_id + * Pair Format: + */ std::vector> chunk_stream_id_{}; + + /** + * Per stream_id stats. + * Map Format: + */ std::map stream_stats_{}; + + /** + * Internal writer object to use for writing. + */ Writer& writer_; }; } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/include/ouster/osf/meta_extrinsics.h b/ouster_osf/include/ouster/osf/meta_extrinsics.h index 66491051..8f896064 100644 --- a/ouster_osf/include/ouster/osf/meta_extrinsics.h +++ b/ouster_osf/include/ouster/osf/meta_extrinsics.h @@ -20,51 +20,117 @@ namespace osf { /** * Metadata entry to store sensor Extrinsics. * - * @verbatim - * Fields: - * extrinsics: mat4d - 4x4 homogeneous transform - * ref_meta_id: uint32_t - reference to other metadata entry, typically - * LidarSensor - * name: string - named id if needed, to support multiple extrinsics per - * object (i.e. LidarSensor, or Gps) with name maybe used - * to associate extrinsics to some external system of - * records or just name the source originator of the - * extrinsics information. - * * OSF type: * ouster/v1/os_sensor/Extrinsics * - * Flatbuffer definition file: + * Flat Buffer Reference: * fb/os_sensor/extrinsics.fbs - * @endverbatim - * */ class Extrinsics : public MetadataEntryHelper { public: + /** + * @param[in] extrinsics ///< The extrinsic matrix to store + * ///< mat4d - 4x4 homogeneous transform + * @param[in] ref_meta_id The flat buffer metadata(not sensor_info) + * reference id + * @param[in] name ///< Named id if needed, to support multiple extrinsics + * ///< perobject (i.e. LidarSensor, or Gps) with name + * ///< maybe usedto associate extrinsics to some external + * ///< system of records or just name the source + * ///< originator of the extrinsics information. + */ explicit Extrinsics(const mat4d& extrinsics, uint32_t ref_meta_id = 0, - const std::string& name = "") - : extrinsics_(extrinsics), ref_meta_id_{ref_meta_id}, name_{name} {} - const mat4d& extrinsics() const { return extrinsics_; } - const std::string& name() const { return name_; } - uint32_t ref_meta_id() const { return ref_meta_id_; } + const std::string& name = ""); + + /** + * Get the extrinsics matrix. + * + * @return The eigen extrinsics matrix. 
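
A sketch of building an `Extrinsics` entry with the constructor documented above. The transform values and the name string are illustrative, and `ref_meta_id` would normally reference the `LidarSensor` metadata entry the extrinsics belong to:

    #include <cstdint>
    #include "ouster/osf/meta_extrinsics.h"

    ouster::osf::Extrinsics make_extrinsics_entry(uint32_t lidar_meta_id) {
        ouster::mat4d ext = ouster::mat4d::Identity();
        ext(0, 3) = 1.2;  // +1.2 m in x, illustrative
        ext(2, 3) = 0.3;  // +0.3 m in z, illustrative
        return ouster::osf::Extrinsics(ext, lidar_meta_id, "front-lidar");
    }
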
+ */ + const mat4d& extrinsics() const; + /** + * Get the extrinsics name. + * + * @return The extrinsics name. + */ + const std::string& name() const; + + /** + * Get the reference metadata id. + * + * @return The reference metadata id. + */ + uint32_t ref_meta_id() const; + + /** + * @copydoc MetadataEntry::buffer + */ std::vector buffer() const final; + /** + * Create an Extrinsics object from a byte array. + * + * @todo Figure out why this wasnt just done as a constructor overload. + * + * @relates MetadataEntry::from_buffer + * + * @param[in] buf The byte vector to construct an Extrinsics object from. + * @return The new Extrinsics cast as a MetadataEntry + */ static std::unique_ptr from_buffer( const std::vector& buf); + /** + * Get the string representation for the Extrinsics object. + * + * @relates MetadataEntry::repr + * + * @return The string representation for the Extrinsics object. + */ std::string repr() const override; private: + /** + * The internal extrinsics array. + * + * Flat Buffer Reference: + * fb/os_sensor/extrinsics.fbs :: Extrinsics :: extrinsics + */ mat4d extrinsics_; + + /** + * The internal flatbuffer metadata reference id. + * + * Flat Buffer Reference: + * fb/os_sensor/extrinsics.fbs :: Extrinsics :: ref_id + */ uint32_t ref_meta_id_; + + /** + * The internal name for the extrinsics array. + * + * Flat Buffer Reference: fb/os_sensor/extrinsics.fbs :: Extrinsics :: name + */ std::string name_; }; +/** @defgroup OSFTraitsExtrinsics OSF Templated traits struct. */ + +/** + * Templated struct for returning the OSF type string. + * + * @ingroup OSFTraitsExtrinsics + */ template <> struct MetadataTraits { + /** + * Return the OSF type string. + * + * @return The OSF type string "ouster/v1/os_sensor/Extrinsics". + */ static const std::string type() { return "ouster/v1/os_sensor/Extrinsics"; } }; } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/include/ouster/osf/meta_lidar_sensor.h b/ouster_osf/include/ouster/osf/meta_lidar_sensor.h index eebf1825..698929d1 100644 --- a/ouster_osf/include/ouster/osf/meta_lidar_sensor.h +++ b/ouster_osf/include/ouster/osf/meta_lidar_sensor.h @@ -20,66 +20,105 @@ namespace osf { /** * Metadata entry to store lidar sensor_info, i.e. Ouster sensor configuration. 
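
A sketch using the metadata-string constructor above, which keeps both the parsed `sensor_info` and the original JSON text (useful when the original metadata must be preserved verbatim in the OSF file):

    #include <iostream>
    #include <string>
    #include "ouster/osf/meta_lidar_sensor.h"

    void describe_lidar_sensor(const std::string& metadata_json) {
        ouster::osf::LidarSensor sensor(metadata_json);

        // parsed form and the original text are both available
        std::cout << sensor.info().prod_line << " sn " << sensor.info().sn
                  << "\n";
        std::cout << sensor.metadata().size() << " bytes of original JSON\n";
    }
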
* - * @verbatim - * Fields: - * metadata: string - lidar metadata in json - * * OSF type: * ouster/v1/os_sensor/LidarSensor * - * Flatbuffer definition file: + * Flat Buffer Reference: * fb/os_sensor/lidar_sensor.fbs - * @endverbatim - * */ class LidarSensor : public MetadataEntryHelper { using sensor_info = ouster::sensor::sensor_info; public: - /// TODO]pb]: This is soft DEPRECATED until we have an updated sensor_info, - /// since we are not encouraging storing the serialized metadata - explicit LidarSensor(const sensor_info& si) - : sensor_info_(si), metadata_(si.original_string()) { - throw std::invalid_argument( - "\nERROR: `osf::LidarSensor()` constructor accepts only " - "metadata_json " - "(full string of the file metadata.json or what was received from " - "sensor) and not a `sensor::sensor_info` object.\n\n" - "We are so sorry that we deprecated it's so hardly but the thing " - "is that `sensor::sensor_info` object doesn't equal the original " - "metadata.json file (or string) that we used to construct it.\n" - "However, Data App when tries to get metadata from OSF looks for " - "fields (like `image_rev`) that only present in metadata.json but " - "not `sensor::sensor_info` which effectively leads to OSF that " - "couldn't be uploaded to Data App.\n"); - } + /** + * @param[in] si Initialize the LidarSensor with a sensor_info object. + */ + explicit LidarSensor(const sensor_info& si); - explicit LidarSensor(const std::string& sensor_metadata) - : sensor_info_(sensor::parse_metadata(sensor_metadata)), - metadata_(sensor_metadata) {} + /** + * @param[in] sensor_metadata Initialize the LidarSensor with a json string + * representation of the sensor_info object. + */ + explicit LidarSensor(const std::string& sensor_metadata); - const sensor_info& info() const { return sensor_info_; } + /** + * Returns the sensor_info associated with the LidarSensor. + * + * @return The sensor_info associated with the LidarSensor. + */ + const sensor_info& info() const; - const std::string& metadata() const { return metadata_; } - - // === Simplified with MetadataEntryHelper: type()+clone() - // std::string type() const override; - // std::unique_ptr clone() const override; + /** + * Returns the json string representation sensor_info associated + * with the LidarSensor. + * + * @return ///< The json string representation of the + * ///< sensor_info object. + */ + const std::string& metadata() const; + /** + * @copydoc MetadataEntry::buffer + */ std::vector buffer() const final; + /** + * Create a LidarSensor object from a byte array. + * + * @todo Figure out why this wasnt just done as a constructor overload. + * + * @relates MetadataEntry::from_buffer + * + * @param[in] buf The raw flatbuffer byte vector to initialize from. + * @return The new LidarSensor cast as a MetadataEntry + */ static std::unique_ptr from_buffer( const std::vector& buf); + /** + * Get the string representation for the LidarSensor object. + * + * @relates MetadataEntry::repr + * + * @return The string representation for the LidarSensor object. + */ std::string repr() const override; + /** + * @todo Figure out why we have both repr and to_string + * + * @relates MetadataEntry::to_string + * + * @copydoc LidarSensor::repr + */ + std::string to_string() const override; + private: + /** + * The internal sensor_info object. + */ sensor_info sensor_info_; + + /** + * The internal json string representation of the sensor_info object. 
+ */ const std::string metadata_; }; +/** @defgroup OSFTraitsLidarSensor Templated struct for traits */ + +/** + * Templated struct for returning the OSF type string. + * + * @ingroup OSFTraitsLidarSensor + */ template <> struct MetadataTraits { + /** + * Return the OSF type string. + * + * @return The OSF type string "ouster/v1/os_sensor/LidarSensor". + */ static const std::string type() { return "ouster/v1/os_sensor/LidarSensor"; } diff --git a/ouster_osf/include/ouster/osf/meta_streaming_info.h b/ouster_osf/include/ouster/osf/meta_streaming_info.h index d5802a83..cb92cd84 100644 --- a/ouster_osf/include/ouster/osf/meta_streaming_info.h +++ b/ouster_osf/include/ouster/osf/meta_streaming_info.h @@ -17,84 +17,234 @@ namespace ouster { namespace osf { +/** + * Class for keeping track of OSF chunks. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: ChunkInfo + */ struct ChunkInfo { + /** + * The offset in the flatbuffer where + * the chunk is located. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: ChunkInfo :: offset + */ uint64_t offset; + + /** + * The specific stream the chunk is associated with. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: ChunkInfo :: stream_id + */ uint32_t stream_id; + + /** + * The number of messages in the chunk + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: ChunkInfo :: message_count + */ uint32_t message_count; }; +/** + * Class for keeping track of OSF stream stats. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamStats + */ struct StreamStats { + /** + * The specific stream the chunk is associated with. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamStats :: stream_id + */ uint32_t stream_id; + + /** + * The first timestamp in the stream. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamStats :: start_ts + */ ts_t start_ts; + + /** + * The last timestamp in the stream. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamStats :: end_ts + */ ts_t end_ts; + + /** + * The number of messages in the stream. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamStats :: message_count + */ uint64_t message_count; + + /** + * The average size of the messages in the stream. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamStats :: message_avg_size + */ uint32_t message_avg_size; + + /** + * Default constructor, sets everthing to 0. + */ StreamStats() = default; - StreamStats(uint32_t s_id, ts_t t, uint32_t msg_size) - : stream_id{s_id}, - start_ts{t}, - end_ts{t}, - message_count{1}, - message_avg_size{msg_size} {}; - void update(ts_t t, uint32_t msg_size) { - if (start_ts > t) start_ts = t; - if (end_ts < t) end_ts = t; - ++message_count; - int avg_size = static_cast(message_avg_size); - avg_size = avg_size + (static_cast(msg_size) - avg_size) / - static_cast(message_count); - message_avg_size = static_cast(avg_size); - } + + /** + * Construct a StreamStats with the specified values + * + * @param[in] s_id Specify the stream_id to use. + * @param[in] t Set the start and end timestamps to the specified value. + * @param[in] msg_size Set the average message size to the specified value. + */ + StreamStats(uint32_t s_id, ts_t t, uint32_t msg_size); + + /** + * Update values within the StreamStats + * + * @param[in] t Add another timestamp and calculate the start and end + * values. 
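
A worked example of the `StreamStats` bookkeeping, assuming `update()` keeps the semantics of the inline implementation it replaces (running average accumulated incrementally, start/end timestamps widened as messages arrive):

    #include <cassert>
    #include "ouster/osf/meta_streaming_info.h"

    void stream_stats_example() {
        using ouster::osf::StreamStats;
        using ouster::osf::ts_t;

        StreamStats stats(/*stream_id*/ 1, ts_t{100}, /*msg_size*/ 1000);
        stats.update(ts_t{300}, 2000);
        stats.update(ts_t{200}, 600);

        assert(stats.message_count == 3);
        assert(stats.start_ts == ts_t{100} && stats.end_ts == ts_t{300});
        // average is accumulated incrementally:
        // 1000 -> 1000 + (2000 - 1000) / 2 = 1500
        //      -> 1500 + (600 - 1500) / 3  = 1200
        assert(stats.message_avg_size == 1200);
    }
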
+ * @param[in] msg_size Add another message size and calculate the average. + */ + void update(ts_t t, uint32_t msg_size); }; +/** + * Get the string representation for a ChunkInfo object. + * + * @return The string representation for a ChunkInfo object. + */ std::string to_string(const ChunkInfo& chunk_info); + +/** + * Get the string representation for a StreamStats object. + * + * @return The string representation for a StreamStats object. + */ std::string to_string(const StreamStats& stream_stats); /** * Metadata entry to store StreamingInfo, to support StreamingLayout (RFC 0018) * - * @verbatim - * Fields: - * chunks_info: chunk -> stream_id map - * stream_stats: stream statistics of messages in file - * * OSF type: * ouster/v1/streaming/StreamingInfo * - * Flatbuffer definition file: - * fb/streaming/streaming_info.fbs - * @endverbatim - * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamingInfo */ class StreamingInfo : public MetadataEntryHelper { public: StreamingInfo() {} + /** + * @param[in] chunks_info Vector containing pairs of + * stream_id/ChunkInfo + * to be used to generate a stream_id/ChunkInfo + * map. + * @param[in] stream_stats Vector containing pairs of + * stream_id/StreamStats + * to be used to generate a + * stream_id/StreamStats map. + */ StreamingInfo( const std::vector>& chunks_info, - const std::vector>& stream_stats) - : chunks_info_{chunks_info.begin(), chunks_info.end()}, - stream_stats_{stream_stats.begin(), stream_stats.end()} {} + const std::vector>& stream_stats); - std::map& chunks_info() { return chunks_info_; } - std::map& stream_stats() { return stream_stats_; } + /** + * @param[in] chunks_info ///< Map containing stream_id/ChunkInfo data. + * @param[in] stream_stats ///< Map containing stream_id/StreamStats data. + */ + StreamingInfo(const std::map& chunks_info, + const std::map& stream_stats); + /** + * Return the chunk_info map. stream_id/ChunkInfo data. + * + * @return The chunk_info map. stream_id/ChunkInfo data. + */ + std::map& chunks_info(); + + /** + * Return the stream stat map. stream_id/StreamStats data. + * + * @return The stream stat map. stream_id/StreamStats data. + */ + std::map& stream_stats(); + + /** + * @copydoc MetadataEntry::buffer + */ std::vector buffer() const override final; + + /** + * Create a StreamingInfo object from a byte array. + * + * @todo Figure out why this wasnt just done as a constructor overload. + * + * @relates MetadataEntry::from_buffer + * + * @param[in] buf The raw flatbuffer byte vector to initialize from. + * @return The new StreamingInfo cast as a MetadataEntry + */ static std::unique_ptr from_buffer( const std::vector& buf); + + /** + * Get the string representation for the LidarSensor object. + * + * @relates MetadataEntry::repr + * + * @return The string representation for the LidarSensor object. + */ std::string repr() const override; private: + /** + * The internal stream_id to ChunkInfo map. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamingInfo :: chunks + */ std::map chunks_info_{}; + + /** + * The internal stream_id to StreamStats map. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: StreamingInfo :: stream_stats + */ std::map stream_stats_{}; }; +/** @defgroup OSFTraitsStreamingInfo Templated struct for traits. */ +/** + * Templated struct for returning the OSF type string. + * + * @ingroup OSFTraitsStreamingInfo + */ template <> struct MetadataTraits { + /** + * Return the OSF type string. 
+ * + * @return The OSF type string "ouster/v1/streaming/StreamingInfo". + */ static const std::string type() { return "ouster/v1/streaming/StreamingInfo"; } }; } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/include/ouster/osf/metadata.h b/ouster_osf/include/ouster/osf/metadata.h index 2082a169..c93a3bb8 100644 --- a/ouster_osf/include/ouster/osf/metadata.h +++ b/ouster_osf/include/ouster/osf/metadata.h @@ -9,30 +9,46 @@ #pragma once #include +#include #include #include #include +#include #include #include #include "flatbuffers/flatbuffers.h" #include "ouster/osf/basics.h" +/// @todo fix api docs in this file +/// @todo add equality operators namespace ouster { namespace osf { /** * Need to be specialized for every derived MetadataEntry class that can be * stored/recovered as metadata object. + * * @sa metadata_type(), MetadataEntry + * + * @tparam MetadataDerived The derived subclass cpp type. */ template struct MetadataTraits { + /** + * Default type returning nullptr. + * + * @todo Possible undefined behavior here. + * + * @returns nullptr + */ static const std::string type() { return nullptr; } }; /** * Helper function that returns the MetadataEntry type of concrete metadata. + * + * @tparam MetadataDerived The derived subclass cpp type. */ template inline const std::string metadata_type() { @@ -85,51 +101,80 @@ class MetadataEntry { std::unique_ptr (*)(const std::vector&); /** - * Type of the metadata, used to identify the object type in serialized OSF - * and as key in deserialization registry + * @return Type of the metadata, used to identify the object type in + * serialized OSF and as key in deserialization registry */ virtual std::string type() const = 0; /** - * Same as type with the difference that type() can be dynamic and - * static_type() should always be defined in compile time. - * NOTE: Introduced as a convenience/(HACK?) to simpler reconstruct and cast - * dynamic objects from MetadataEntryRef + * @return Same as type with the difference that type() can be dynamic and + * static_type() should always be defined in compile time. + * NOTE: Introduced as a convenience/(HACK?) to simpler reconstruct + * and cast dynamic objects from MetadataEntryRef */ virtual std::string static_type() const = 0; /** * Should be provided by derived class and is used in handling polymorphic * objects and avoid object slicing + * + * @return Should return a clone of the current MetadataEntry */ virtual std::unique_ptr clone() const = 0; /** - * byte represantation of the internal derived metadata type, used as - * serialization function when saving to OSF file + * Byte represantation of the internal derived metadata type, used as + * serialization function when saving to OSF file. + * + * @return The byte vector representation of the metadata. */ virtual std::vector buffer() const = 0; /** - * recover metadata object from the bytes representation if possible. + * Recover metadata object from the bytes representation if possible. * If recovery is not possible returns nullptr + * + * @param[in] buf The buffer to recover the metadata object from. + * @param[in] type_str The type string from the derived type. 
+ * @return A new object of the derived type cast as a MetadataEntry */ static std::unique_ptr from_buffer( const std::vector& buf, const std::string type_str); /** - * string representation of the internal metadata object, used in + * String representation of the internal metadata object, used in * to_string() for debug/info outputs. + * + * @return The string representation for the internal metadata object. */ virtual std::string repr() const; /** - * string representation of the whole metadata entry with type and id + * String representation of the whole metadata entry with type and id. + * + * @todo Figure out why we have both repr and to_string + * + * @return The string representation of the whole metadata entry. */ virtual std::string to_string() const; - void setId(uint32_t id) { id_ = id; } - uint32_t id() const { return id_; } + /** + * Unique id used inside the flatbuffer metadata store to refer to + * metadata entries. + * + * @param[in] id The unique id to set. + */ + void setId(uint32_t id); + + /** + * Unique id used inside the flatbuffer metadata store to refer to + * metadata entries. + * + * @relates setId + * + * @return The unique id of this object. + */ + uint32_t id() const; /** * Casting of the base class to concrete derived metadata entry type. @@ -137,6 +182,10 @@ class MetadataEntry { * is a polymorphic object, or as reconstruction from buffer() * representation when it used from MetadataEntryRef (i.e. wrapper on * underlying bytes) + * + * @tparam T The derived metadata type + * @return A unique pointer to the derived metadata object, nullptr on + * error. */ template std::unique_ptr as() const { @@ -148,6 +197,10 @@ class MetadataEntry { m = T::from_buffer(buffer()); } if (m != nullptr) { + // Verify the casting + T& test = dynamic_cast(*m); + (void)test; + m->setId(id()); // NOTE: Little bit crazy unique_ptr cast (not absolutely // correct because of no deleter handled). But works @@ -161,21 +214,29 @@ class MetadataEntry { /** * Implementation details that emits buffer() content as proper * Flatbuffer MetadataEntry object. + * + * @param[in] fbb The flatbuffer builder to use to make the entry. + * @return An offset into a flatbuffer for the new entry. */ flatbuffers::Offset make_entry( flatbuffers::FlatBufferBuilder& fbb) const; /** - * Registry that holds from_buffer function by type string and used - * during deserialization. + * Method to return the registry that holds from_buffer function by + * type string and is used during deserialization. The registry is + * a static variable defined within the get_registry method. + * + * @return The static registry used to register metadata types. */ static std::map& get_registry(); virtual ~MetadataEntry() = default; private: - // id as its stored in metadata OSF and used for linking between other - // metadata object and messages to streams + /** + * Id as its stored in metadata OSF and used for linking between other + * metadata object and messages to streams. + */ uint32_t id_{0}; }; @@ -183,6 +244,11 @@ class MetadataEntry { * Safe and convenient cast of shared_ptr to concrete derived * class using either shortcut (dynamic_pointer_cast) when it's save to do so * or reconstructs a new copy of the object from underlying data. + * + * @tparam MetadataDerived The cpp type of the derived object. + * @tparam MetadataBase The cpp type of the metadata base. + * @param[in] m The MetadataBase to convert to MetadataDerived. + * @return The MetadataBase cast as a MetadataDerived pointer. 
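
A sketch of the `as<T>()` downcast documented above, using the `Extrinsics` type from this patch; a nullptr result simply means the entry holds a different metadata type:

    #include <iostream>
    #include "ouster/osf/meta_extrinsics.h"
    #include "ouster/osf/metadata.h"

    void handle_entry(const ouster::osf::MetadataEntry& entry) {
        if (auto ext = entry.as<ouster::osf::Extrinsics>()) {
            // ext is a std::unique_ptr<Extrinsics>; the metadata id is preserved
            std::cout << "extrinsics '" << ext->name() << "' refers to meta id "
                      << ext->ref_meta_id() << "\n";
        }
    }
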
*/ template std::shared_ptr metadata_pointer_as( @@ -199,17 +265,88 @@ std::shared_ptr metadata_pointer_as( * Registrar class helper to add static from_buffer() function of the concrete * derived metadata class to the registry. * + * @dot + * digraph { + * subgraph cluster_SpecificMetadataClass { + * SpecificMetadataClass [ + * label="class SpecificMetadataClass", + * shape="rectangle"]; + * SpecificMetadataClassType [ + * label="struct MetadataTraits", + * shape="rectangle" + * ]; + * + * SpecificMetadataClass -> SpecificMetadataClassType; + * } + * + * MetadataEntryHelper [ + * label="class MetadataEntryHelper", + * shape="rectangle"]; + * MetadataTraits [ + * label="struct MetadataTraits", + * shape="rectangle"]; + * + * SpecificMetadataClass -> MetadataEntryHelper; + * SpecificMetadataClassType -> MetadataTraits; + * + * MetadataEntry [ + * label="MetadataEntry", + * shape="rectangle"]; + * MetadataEntryRef [ + * label="MetadataEntryRef", + * shape="rectangle"]; + * + * MetadataEntry -> MetadataEntryRef; + * + * subgraph cluster_RegisterMetadata { + * RegisterMetadata [ + * label="RegisterMetadata", + * shape="rectangle"]; + * RegisterMetadata_Decoder [ + * label="RegisterMetadata::registered=register_type_decoder()", + * shape="rectangle"]; + * RegisterMetadata->RegisterMetadata_Decoder; + * }; + * + * MetadataEntryHelper -> MetadataEntry; + * MetadataEntryHelper -> RegisterMetadata; + * + * subgraph cluster_MetadataStore { + * MetadataStore [ + * label="MetadataStore", + * shape="rectangle"]; + * MetadataStore_Entries [ + * label="MetadataStore::metadata_entries_", + * shape="rectangle"]; + * MetadataStore->MetadataStore_Entries; + * }; + * + * MetadataEntry -> MetadataStore_Entries; + * } + * @enddot + * + * @tparam MetadataDerived The derived subclass cpp type. */ template struct RegisterMetadata { virtual ~RegisterMetadata() { - if (!registered_) { - std::cerr << "ERROR: Can't be right! We shouldn't be here. " - "Duplicate metadata types?" - << std::endl; - std::abort(); - } + assert(registered_); + + /** + * This line is incredibly IMPORTANT. This line ensures + * that the compiler does not optimize out the side effects + * from the register_type_decoder method. Without this line + * the MetadataEntry registry will be empty. + */ + (void)registered_; } + + /** + * Register the specific derived class decoder into the global registrar. + * + * @return true If class has been registered successfully, + * false otherwise. + */ static bool register_type_decoder() { auto& registry = MetadataEntry::get_registry(); auto type = metadata_type(); @@ -222,8 +359,20 @@ struct RegisterMetadata { registry.insert(std::make_pair(type, MetadataDerived::from_buffer)); return true; } + + /** + * If the derived class has been registered. + */ static const bool registered_; }; + +/** + * This line is incredibly IMPORTANT. This will statically + * run the registration for all derived classes before the class + * constructer is run. + * + * @tparam MetadataDerived The derived subclass cpp type. + */ template const bool RegisterMetadata::registered_ = RegisterMetadata::register_type_decoder(); @@ -235,17 +384,33 @@ const bool RegisterMetadata::registered_ = * Also registers the from_buffer() function for deserializer registry via * RegisterMetadata helper trick. * + * @tparam DerivedMetadataEntry The derived Metadata Entry type. */ template class MetadataEntryHelper : public MetadataEntry, RegisterMetadata { public: + /** + * Return the metadata type string for the specific derived class. 
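+ *
+ * For a concrete entry the returned string comes from the matching
+ * MetadataTraits specialization. A minimal sketch of how a custom entry is
+ * typically wired into this machinery (MyMeta and its type string are
+ * illustrative, not part of the SDK):
+ * @code
+ * namespace ouster { namespace osf {
+ *
+ * class MyMeta : public MetadataEntryHelper<MyMeta> {
+ *   public:
+ *     std::vector<uint8_t> buffer() const override { return {}; }
+ *     static std::unique_ptr<MetadataEntry> from_buffer(
+ *         const std::vector<uint8_t>&) {
+ *         return std::make_unique<MyMeta>();
+ *     }
+ * };
+ *
+ * template <>
+ * struct MetadataTraits<MyMeta> {
+ *     static const std::string type() { return "custom/v1/MyMeta"; }
+ * };
+ *
+ * }}  // namespace ouster::osf
+ * @endcode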
+ * + * @return The specific type string for the derived class. + */ std::string type() const override { return metadata_type(); } + + /** + * @copydoc type() + */ std::string static_type() const override { return metadata_type(); } + + /** + * Clone the specific derived metadata object. + * + * @return The cloned MetadataEntry object. + */ std::unique_ptr clone() const override { return std::make_unique( *dynamic_cast(this)); @@ -270,35 +435,62 @@ class MetadataEntryRef : public MetadataEntry { /** * Creates the metadata reference from Flatbuffers v2::MetadataEntry buffer. * No copy involved. + * + * @param[in] buf The buffer to create the MetadataEntryRef from. */ - explicit MetadataEntryRef(const uint8_t* buf) : buf_{buf} { - const gen::MetadataEntry* meta_entry = - reinterpret_cast(buf_); - buf_type_ = meta_entry->type()->str(); - setId(meta_entry->id()); - } + explicit MetadataEntryRef(const uint8_t* buf); - std::string type() const override { return buf_type_; } - std::string static_type() const override { - return metadata_type(); - } + /** + * Return the type of the MetadataEntry. + * + * @return The type of the MetadataEntry. + */ + std::string type() const override; - std::unique_ptr clone() const override { - return std::make_unique(*this); - } + /** + * @copydoc type() + */ + std::string static_type() const override; + /** + * Clone the MetadataEntry. + * + * @return The cloned MetadataEntry object. + */ + std::unique_ptr clone() const override; + + /** + * Return the raw underlying buffer for the MetadataEntryRef. + * + * @return The raw underlying byte vector. + */ std::vector buffer() const final; /** * Reconstructs the object as concrete metadata of type() from the * buffer() using registered deserialization function from_buffer() of * current type + * + * @return The reconstructed object. */ std::unique_ptr as_type() const; private: - void setId(uint32_t id) { MetadataEntry::setId(id); } + /** + * Internal method to set the specific metadata entry id. + * + * @param[in] id The metadata id to set. + */ + void setId(uint32_t id); + + /** + * Data pointer to the raw MetadataEntry buffer. + */ const uint8_t* buf_; + + /** + * Internal variable for storing the metadata type string. + */ std::string buf_type_{}; }; @@ -308,6 +500,12 @@ class MetadataEntryRef : public MetadataEntry { */ template <> struct MetadataTraits { + /** + * Implementation detail for MetadataEntryRef to distinguish it from + * any possible metadata type. + * + * @return The type string "impl/MetadataEntryRef". + */ static const std::string type() { return "impl/MetadataEntryRef"; } }; @@ -318,42 +516,37 @@ struct MetadataTraits { * Provide functions to retrieve concrete metadata types by id or by type. * * Also can serialize itself to Flatbuffers collection of metadata. - * */ class MetadataStore { + /** + * Metadata id to MetadataEntry map. + */ using MetadataEntriesMap = std::map>; public: using key_type = MetadataEntriesMap::key_type; - uint32_t add(MetadataEntry&& entry) { return add(entry); } - - uint32_t add(MetadataEntry& entry) { - if (entry.id() == 0) { - /// @todo [pb]: Figure out the whole sequence of ids in addMetas in - /// the Reader case - assignId(entry); - } else if (metadata_entries_.find(entry.id()) != - metadata_entries_.end()) { - std::cout << "WARNING: MetadataStore: ENTRY EXISTS! 
id = " - << entry.id() << std::endl; - return entry.id(); - } else if (next_meta_id_ == entry.id()) { - // Find next available next_meta_id_ so we avoid id collisions - ++next_meta_id_; - auto next_it = metadata_entries_.lower_bound(next_meta_id_); - while (next_it != metadata_entries_.end() && - next_it->first == next_meta_id_) { - ++next_meta_id_; - next_it = metadata_entries_.lower_bound(next_meta_id_); - } - } + /** + * Add a specified MetadataEntry to the store + * + * @param[in] entry The entry to add to the store. + */ + uint32_t add(MetadataEntry&& entry); - metadata_entries_.emplace(entry.id(), entry.clone()); - return entry.id(); - } + /** + * @copydoc add(MetadataEntry&& entry) + */ + uint32_t add(MetadataEntry& entry); + /** + * Get the first specified MetadataEntry associated to the + * template parameter. + * + * @tparam MetadataEntryClass The metadata cpp type to try and + * retrieve. + * @return The MetadataEntry of type MetadataEntryClass if it exists. + */ template std::shared_ptr get() const { auto it = metadata_entries_.begin(); @@ -366,6 +559,14 @@ class MetadataStore { return nullptr; } + /** + * Count the number of specified MetadataEntry associated to the + * template parameter. + * + * @tparam MetadataEntryClass The metadata cpp type to try and + * count. + * @return The count type MetadataEntryClass. + */ template size_t count() const { auto it = metadata_entries_.begin(); @@ -378,18 +579,41 @@ class MetadataStore { return cnt; } + /** + * Get the specified MetadataEntry associated to the + * template parameter and metadata_id. + * + * @tparam MetadataEntryClass The metadata cpp type to try and + * retrieve. + * @param[in] metadata_id The id to try and return the associated entry. + * @return The MetadataEntryClass. + */ template std::shared_ptr get(const uint32_t metadata_id) const { auto meta_entry = get(metadata_id); return metadata_pointer_as(meta_entry); } + /** + * Get the specified MetadataEntry associated to the + * metadata_id. + * + * @param[in] metadata_id The id to try and return the associated entry. + * @return The MetadataEntry. + */ std::shared_ptr get(const uint32_t metadata_id) const { auto it = metadata_entries_.find(metadata_id); if (it == metadata_entries_.end()) return nullptr; return it->second; } + /** + * Return a map containing all of the MetadataEntries that match + * the specified template class. + * + * @tparam MetadataEntryClass The metadata cpp type to try and retrieve. + * @return The MetadataEntry map. + */ template std::map> find() const { std::map> res; @@ -403,17 +627,46 @@ class MetadataStore { return res; } - size_t size() const { return metadata_entries_.size(); } + /** + * Return the number of MetadataEntries. + * + * @return The number of MetadataEntry objects. + */ + size_t size() const; - const MetadataEntriesMap& entries() const { return metadata_entries_; } + /** + * Return the entire map of MetadataEntry. + * + * @return The entire map of MetadataEnty objects. + */ + const MetadataEntriesMap& entries() const; + /** + * Serialize the MetadataStore to the specified flatbuffer builder + * and return the resulting byte vector. + * + * @param[in] fbb The flatbuffer builder to use. + * @return The resulting serialized byte vector. + */ std::vector> make_entries(flatbuffers::FlatBufferBuilder& fbb) const; private: - void assignId(MetadataEntry& entry) { entry.setId(next_meta_id_++); } + /** + * Assign and increment an id to the entry. + * + * @param[in] entry The entry to assign a generated id to. 
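+ *
+ * Sketch of the flow this supports: MetadataStore::add() calls assignId()
+ * for entries whose id() is still 0 (the LidarScanStreamMeta argument value
+ * is illustrative only):
+ * @code
+ * ouster::osf::MetadataStore store;
+ * ouster::osf::LidarScanStreamMeta meta(1);   // id() == 0 until added
+ * uint32_t assigned_id = store.add(meta);     // receives the next free id
+ * @endcode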
+ */ + void assignId(MetadataEntry& entry); + /** + * The autogenerated meta id variable. + */ uint32_t next_meta_id_{1}; + + /** + * The internal storage for all of the metadata entries. + */ MetadataEntriesMap metadata_entries_{}; }; diff --git a/ouster_osf/include/ouster/osf/operations.h b/ouster_osf/include/ouster/osf/operations.h index 6f5b4821..14030b48 100644 --- a/ouster_osf/include/ouster/osf/operations.h +++ b/ouster_osf/include/ouster/osf/operations.h @@ -11,18 +11,20 @@ #include #include "ouster/osf/basics.h" +#include "ouster/osf/metadata.h" +#include "ouster/types.h" +/// @todo fix parameter directions in api doc namespace ouster { namespace osf { /** * Outputs OSF v2 metadata + header info in JSON format. * - * @param file OSF file (only v2 supported) - * @param full flag print full information (i.e. chunks_offset and decoded - * metas) + * @param[in] file OSF file (only v2 supported) + * @param[in] full flag print full information (i.e. chunks_offset and decoded + * metas) * @return JSON formatted string of the OSF metadata + header - * */ std::string dump_metadata(const std::string& file, bool full = true); @@ -30,19 +32,42 @@ std::string dump_metadata(const std::string& file, bool full = true); * Reads OSF file and prints (STDOUT) messages types, timestamps and * overall statistics per message type. * - * @param file OSF file - * @param with_decoding decode known messages (used to time a - * reading + decoding together) - * + * @param[in] file OSF file + * @param[in] with_decoding decode known messages (used to time a + * reading + decoding together) */ void parse_and_print(const std::string& file, bool with_decoding = false); /** - * Convert pcap with a single sensor stream to OSF. + * Backup the metadata blob in an OSF file. + * + * @param[in] osf_file_name The OSF file to backup from. + * @param[in] backup_file_name The path to store the metadata blob backup. + * @return The number of the bytes written to the backup file. + */ +int64_t backup_osf_file_metablob(const std::string& osf_file_name, + const std::string& backup_file_name); + +/** + * Restore an OSF metadata blob from a backup file. + * + * @param[in] osf_file_name The OSF file to restore to. + * @param[in] backup_file_name The path to the metadata blob backup. + * @return The number of the bytes written to the OSF file. + */ +int64_t restore_osf_file_metablob(const std::string& osf_file_name, + const std::string& backup_file_name); + +/** + * Modify an OSF files sensor_info metadata. + * + * @param[in] file_name The OSF file to modify. + * @param[in] new_metadata The new metadata for the OSF file + * @return The number of the bytes written to the OSF file. 
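+ *
+ * Usage sketch (file and JSON paths are illustrative; assumes one updated
+ * sensor_info per lidar stream, in stream order):
+ * @code
+ * auto info = ouster::sensor::metadata_from_json("updated_meta.json");
+ * std::vector<ouster::sensor::sensor_info> new_metadata = {info};
+ * int64_t bytes_written =
+ *     ouster::osf::osf_file_modify_metadata("recording.osf", new_metadata);
+ * @endcode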
*/ -bool pcap_to_osf(const std::string& pcap_filename, - const std::string& meta_filename, int lidar_port, - const std::string& osf_filename, int chunk_size = 0); +int64_t osf_file_modify_metadata( + const std::string& file_name, + const std::vector& new_metadata); } // namespace osf } // namespace ouster diff --git a/ouster_osf/include/ouster/osf/pcap_source.h b/ouster_osf/include/ouster/osf/pcap_source.h index 0bd2e7f3..85d30244 100644 --- a/ouster_osf/include/ouster/osf/pcap_source.h +++ b/ouster_osf/include/ouster/osf/pcap_source.h @@ -4,7 +4,6 @@ * * @file pcap_source.h * @brief Pcap raw data source - * */ #pragma once @@ -28,21 +27,38 @@ class PcapRawSource { public: using ts_t = std::chrono::nanoseconds; - /// Lidar data callbacks - using LidarDataHandler = - std::function; + /** + * Lidar data callbacks + * + * @param[in] timestamp The timestamp for the scan. + * @param[in] scan The LidarScan object. + */ + using LidarDataHandler = std::function; - /// General pcap packet handler + /** + * General pcap packet handler + * + * @param[in] info The sensor_info for the packet. + * @param[in] buf The raw buffer for the packet. + */ using PacketHandler = std::function; + const ouster::sensor_utils::packet_info& info, const uint8_t* buf)>; - // Predicate to control the bag run loop + /** + * Predicate to control the bag run loop + * + * @param[in] info The sensor_info for the packet. + * @return True if the loop should continue, False if the loop should halt. + */ using PacketInfoPredicate = - std::function; + std::function; /** * Opens pcap file and checks available packets inside with * heuristics applied to guess Ouster lidar port with data. + * + * @param[in] filename The filename of the pcap file to open. */ PcapRawSource(const std::string& filename); @@ -50,14 +66,18 @@ class PcapRawSource { * Attach lidar data handler to the port that receives already * batched LidarScans with a timestamp of the first UDP lidar packet. * LidarScan uses default field types by the profile + * + * @param[in] dst_port The destination port for the target lidar stream. + * @param[in] info The sensor info for the stream. + * @param[in] lidar_handler The callback to call on packet. */ void addLidarDataHandler(int dst_port, const ouster::sensor::sensor_info& info, LidarDataHandler&& lidar_handler); /** - * The addLidarDataHandler() function overload. In this function, LidarScan - * uses user modified field types + * @copydoc addLidarDataHandler + * @param[in] ls_field_types The LidarScan field types to use. */ void addLidarDataHandler(int dst_port, const ouster::sensor::sensor_info& info, @@ -75,24 +95,48 @@ class PcapRawSource { * `pred(pinfo) == true`. * `pred` function called before reading packet buffer and passing to the * appropriate handlers. + * + * @param[in] pred The predicate function to decide whether to continue or + * not. */ void runWhile(const PacketInfoPredicate& pred); /** - * Close pcap file + * Close the pcap file. */ ~PcapRawSource(); private: + /// Remove some stuff PcapRawSource(const PcapRawSource&) = delete; PcapRawSource& operator=(const PcapRawSource&) = delete; + /** + * Read current packet and dispatch handlers accordingly. + * + * @param[in] pinfo The new packet info. + */ void handleCurrentPacket(const sensor_utils::packet_info& pinfo); + /** + * The pcap file path. + */ std::string pcap_filename_; + + /** + * The associated sensor_info. + */ ouster::sensor::sensor_info info_; + + /** + * The internal pcap file handler. 
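+ *
+ * This handle backs the public run loop above. A usage sketch of that
+ * interface (pcap path, port and the lambda signature follow the handler
+ * documentation above and are illustrative):
+ * @code
+ * auto info = ouster::sensor::metadata_from_json("sensor_meta.json");
+ * ouster::osf::PcapRawSource source("recording.pcap");
+ * source.addLidarDataHandler(
+ *     7502, info,
+ *     [](const ouster::osf::ts_t ts, const ouster::LidarScan& scan) {
+ *         // first-packet timestamp plus the batched LidarScan
+ *     });
+ * source.runWhile([](const ouster::sensor_utils::packet_info&) {
+ *     return true;  // keep reading until the pcap is exhausted
+ * });
+ * @endcode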
+ */ std::shared_ptr pcap_handle_{ nullptr}; + + /** + * Map containing a 'destination port' to 'handler' mapping. + */ std::map packet_handlers_{}; }; diff --git a/ouster_osf/include/ouster/osf/reader.h b/ouster_osf/include/ouster/osf/reader.h index 0bcbc60b..2fcfaf88 100644 --- a/ouster_osf/include/ouster/osf/reader.h +++ b/ouster_osf/include/ouster/osf/reader.h @@ -18,24 +18,116 @@ namespace ouster { namespace osf { -enum class ChunkValidity { UNKNOWN = 0, VALID, INVALID }; +/** + * Enumerator for dealing with chunk validity. + * + * This is synthesized and thus does not have a reference in the Flat Buffer. + * Value is set in Reader::verify_chunk + */ +enum class ChunkValidity { + UNKNOWN = 0, ///< Validity can not be ascertained. + VALID, ///< Chunk is valid. + INVALID ///< Chunk is invalid. +}; /** - * Chunks state. Validity info and next offset for forward iteration. + * The structure for representing chunk information and + * for forward iteration. + * + * This struct is partially mapped to the Flat Buffer data. + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset */ struct ChunkState { + /** + * The current chunk's offset from the begining of the chunks section. + * + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset :: offset + */ uint64_t offset; + + /** + * The next chunk's offset for forward iteration. + * Should work like a linked list. + * + * This is partially synthesized from the Flat Buffers. + * This will link up with the next chunks offset. + * Value is set in ChunksPile::link_stream_chunks + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset :: offset + */ uint64_t next_offset; + + /** + * The first timestamp in the chunk in ordinality. + * + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset :: start_ts + */ ts_t start_ts; + + /** + * The last timestamp in the chunk in ordinality. + * + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset :: end_ts + */ ts_t end_ts; + + /** + * The validity of the current chunk + * + * This is synthesized and thus does not have a reference in the Flat + * Buffers. Value is set in Reader::verify_chunk + */ ChunkValidity status; }; +/** + * The structure for representing streaming information. + * + * This struct is partially mapped to the Flat Buffer data. + */ struct ChunkInfoNode { + /** + * The chunk offset from the begining of the chunks section. + * + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset :: offset + */ uint64_t offset; + + /** + * The next chunk's offset for forward iteration. + * Should work like a linked list. + * + * This is partially synthesized from the Flat Buffers. + * This will link up with the next chunks offset. + * Value is set in ChunksPile::link_stream_chunks + * Flat Buffer Reference: fb/metadata.fbs :: ChunkOffset :: offset + */ uint64_t next_offset; + + /** + * The stream this is associated with. + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: ChunkInfo :: stream_id + */ uint32_t stream_id; + + /** + * Total number of messages in a `stream_id` in the whole OSF file + * + * Flat Buffer Reference: + * fb/streaming/streaming_info.fbs :: ChunkInfo :: message_count + */ uint32_t message_count; + + /** + * The index of the start of the message. + * @todo try to describe this better + * + * This is partially synthesized from the Flat Buffers. 
+ * Value is set in ChunksPile::link_stream_chunks + * Synthesized from Flat Buffer Reference: + * fb/metadata.fbs :: ChunkOffset :: message_count + */ uint32_t message_start_idx; }; @@ -44,63 +136,168 @@ struct ChunkInfoNode { */ class ChunksPile { public: - using ChunkStateIter = std::unordered_map::iterator; - using ChunkInfoIter = std::unordered_map::iterator; + /** + * stream_id to offset map. + */ using StreamChunksMap = std::unordered_map>>; - ChunksPile(){}; + /** + * Default blank constructor. + */ + ChunksPile(); + /** + * Add a new chunk to the ChunkPile. + * + * @param[in] offset The offset for the chunk. + * @param[in] start_ts The first timestamp in the chunk. + * @param[in] end_ts The first timestamp in the chunk. + */ void add(uint64_t offset, ts_t start_ts, ts_t end_ts); + + /** + * Return the chunk associated with an offset. + * + * @param[in] offset The offset to return the chunk for. + * @return The chunk if found, or nullptr. + */ ChunkState* get(uint64_t offset); + + /** + * Add a new streaming info to the ChunkPile. + * + * @param[in] offset The offset for the chunk. + * @param[in] stream_id The stream_id associated. + * @param[in] message_count The number of messages. + */ void add_info(uint64_t offset, uint32_t stream_id, uint32_t message_count); + + /** + * Return the streaming info associated with an offset. + * + * @param[in] offset The offset to return the streaming info for. + * @return The streaming info if found, or nullptr. + */ ChunkInfoNode* get_info(uint64_t offset); + + /** + * Return the streaming info associated with a message_idx. + * + * @param[in] stream_id The stream to look for infos in. + * @param[in] message_idx The specific message index to look for. + * @return The streaming info if found, or nullptr. + */ ChunkInfoNode* get_info_by_message_idx(uint32_t stream_id, uint32_t message_idx); + + /** + * Return the chunk associated with a lower bound timestamp. + * + * @param[in] stream_id The stream to look for chunks in. + * @param[in] ts The lower bound for the chunk. + * @return The chunk if found, or nullptr. + */ ChunkState* get_by_lower_bound_ts(uint32_t stream_id, const ts_t ts); + + /** + * Return the next chunk identified by the offset. + * + * @param[in] offset The offset to return the next chunk for. + * @return The chunk if found, or nullptr. + */ ChunkState* next(uint64_t offset); + + /** + * Return the next chunk identified by the offset per stream. + * + * @param[in] offset The offset to return the next chunk for. + * @return The chunk if found, or nullptr. + */ ChunkState* next_by_stream(uint64_t offset); + /** + * Return the first chunk. + * + * @return The chunk if found, or nullptr. + */ ChunkState* first(); - ChunkStateIter begin(); - ChunkStateIter end(); - + /** + * Return the size of the chunk pile. + * + * @return The size of the chunk pile. + */ size_t size() const; - bool has_info() const; + /** + * Return if there is a message index. + * + * @return If there is a message index. + */ bool has_message_idx() const; - StreamChunksMap& stream_chunks() { return stream_chunks_; } + /** + * Return the stream_id to chunk offset map. + * + * @return The stream_id to chunk offset map. + */ + StreamChunksMap& stream_chunks(); - // builds internal links between ChunkInfoNode per stream + /** + * Builds internal links between ChunkInfoNode per stream. + * + * @throws std::logic_error exception on non increasing timestamps. + * @throws std::logic_error exception on non existent info. 
+ */ void link_stream_chunks(); private: + /** + * The offset to chunk state map. + */ std::unordered_map pile_{}; + + /** + * The offset to stream info map. + */ std::unordered_map pile_info_{}; - // ordered list of chunks offsets per stream id (only when ChunkInfo - // is present) + /** + * Ordered list of chunks offsets per stream id (only when ChunkInfo + * is present). + */ StreamChunksMap stream_chunks_{}; }; +/** + * To String Functionality For ChunkState + * + * @param[in] chunk_state The data to get the string representation for + * @return The string representation + */ std::string to_string(const ChunkState& chunk_state); + +/** + * To String Functionality For ChunkInfoNode + * + * @param[in] chunk_info The data to get the string representation format + * @return The string representation + */ std::string to_string(const ChunkInfoNode& chunk_info); +// Forward Decls class Reader; class MessageRef; class ChunkRef; class ChunksPile; class ChunksRange; -struct MessagesStandardIter; struct MessagesStreamingIter; struct MessagesChunkIter; - class MessagesStreamingRange; /** - * Chunks forward iterator in order of offset. + * Chunk forward iterator in order of offset. */ struct ChunksIter { using iterator_category = std::forward_iterator_tag; @@ -109,110 +306,172 @@ struct ChunksIter { using pointer = const std::unique_ptr; using reference = const ChunkRef&; + /** + * Default construction that zeros out member variables. + */ ChunksIter(); + + /** + * Initialize from another ChunksIter object. + * + * @param[in] other The other ChunksIter object to initalize from. + */ ChunksIter(const ChunksIter& other); + + /** + * Default assign operator. + * + * @param[in] other The other ChunksIter to assign to this. + */ ChunksIter& operator=(const ChunksIter& other) = default; + /** + * Return a ChunkRef object associated with this ChunksIter object. + * + * @throws std::logic_error Exception on end of iteration. + * + * @return The ChunkRef object associated with this ChunksIter object. + */ const ChunkRef operator*() const; + + /** + * Return a ChunkRef pointer associated with this ChunksIter object. + * + * @return The ChunkRef pointer associated with this ChunksIter object. + */ const std::unique_ptr operator->() const; + + /** + * Increment the ChunksIter iterator and return *this. + * + * @return The current ChunksIter object. + */ ChunksIter& operator++(); - ChunksIter operator++(int); + + /** + * Equality operator to compare two ChunksIter objects. + * + * @param[in] other The other object to compare. + * @return Whether the two ChunksIter objects are the same. + */ bool operator==(const ChunksIter& other) const; + + /** + * Equality operator to compare two ChunksIter objects. + * + * @relates operator==(const ChunksIter& other) + * @param[in] other The other object to compare. + * @return Whether the two ChunksIter objects are not the same. + */ bool operator!=(const ChunksIter& other) const; + /** + * To String Functionality For ChunksIter. + * + * @return The string representation + */ std::string to_string() const; private: + /** + * Internal constructor. + * + * @param[in] begin_addr The offset in the chunks to start at. + * @param[in] end_addr The offset in the chunks that they end at. + * @param[in] reader The reader object to use for reading. + */ ChunksIter(const uint64_t begin_addr, const uint64_t end_addr, Reader* reader); - // move iterator to the next chunk of "verified" chunks set + + /** + * Move iterator to the next chunk of "verified" chunks. 
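+ *
+ * This is what drives range-for iteration over Reader::chunks(); a sketch
+ * of that external view (the file name is illustrative):
+ * @code
+ * ouster::osf::Reader reader("recording.osf");
+ * for (const auto chunk : reader.chunks()) {
+ *     // only chunks that passed verification are visited here
+ * }
+ * @endcode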
+ */ void next(); + + /** + * Move iterator to the next chunk. + */ void next_any(); + + /** + * Verify that the ChunksIter is not at the end, + * and that the current chunk is valid. + */ bool is_cleared(); + /** + * The current offset in the chunks. + */ uint64_t current_addr_; + + /** + * The offset in the chunks that they end at. + */ uint64_t end_addr_; + + /** + * The internal reader object to use for reading. + */ Reader* reader_; + friend class ChunksRange; }; // ChunksIter /** - * Chunks range + * std iterator class for iterating through chunks. */ class ChunksRange { public: + /** + * Begin function for std iterator support. + * + * @return A ChunksIter object for iteration. + */ ChunksIter begin() const; + + /** + * End function for std iterator support. + * + * @return A ChunksIter object for signifying + * the end of iteration. + */ ChunksIter end() const; + /** + * To String Functionality For ChunksRange. + * + * @return The string representation. + */ std::string to_string() const; private: + /** + * Constructor for Reader to call for creating the ChunksRange object. + * + * @param[in] begin_addr The beginning offset into the chunks buffer. + * @param[in] end_addr The end offset into the chunks buffer. + * @param[in,out] reader The Reader object to use for reading. + */ ChunksRange(const uint64_t begin_addr, const uint64_t end_addr, Reader* reader); + /** + * The internal store of the begining offset into the chunks. + */ uint64_t begin_addr_; + + /** + * The internal store of the ending offset into the chunks. + */ uint64_t end_addr_; + + /** + * The internal store of the Reader object used for reading. + */ Reader* reader_; friend class Reader; }; // ChunksRange -/** - * Messages range. - */ -class MessagesStandardRange { - public: - MessagesStandardIter begin() const; - MessagesStandardIter end() const; - - std::string to_string() const; - - private: - MessagesStandardRange(const ChunksIter begin_it, const ChunksIter end_it); - - ChunksIter begin_chunk_it_; - ChunksIter end_chunk_it_; - friend class Reader; -}; // MessagesStandardRange - -/** - * Messages forward iterator to read all messages across chunks span. - */ -struct MessagesStandardIter { - using iterator_category = std::forward_iterator_tag; - using value_type = const MessageRef; - using difference_type = std::ptrdiff_t; - using pointer = const std::unique_ptr; - using reference = const MessageRef&; - - MessagesStandardIter(); - MessagesStandardIter(const MessagesStandardIter& other); - MessagesStandardIter& operator=(const MessagesStandardIter& other) = - default; - - const MessageRef operator*() const; - std::unique_ptr operator->() const; - MessagesStandardIter& operator++(); - MessagesStandardIter operator++(int); - bool operator==(const MessagesStandardIter& other) const; - bool operator!=(const MessagesStandardIter& other) const; - - std::string to_string() const; - - private: - MessagesStandardIter(const ChunksIter begin_it, const ChunksIter end_it, - const size_t msg_idx); - // move iterator to the next msg that passes is_cleared() - void next(); - void next_any(); - // true if the current msg pointer passes is_cleared() test (i.e. valid) - bool is_cleared(); - - ChunksIter current_chunk_it_; - ChunksIter end_chunk_it_; - size_t msg_idx_; - friend class MessagesStandardRange; -}; // MessagesStandardIter - /** * %OSF Reader that simply reads sequentially messages from the OSF file. * @@ -222,24 +481,48 @@ class Reader { public: /** * Creates reader from %OSF file resource. 
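+ *
+ * End-to-end reading sketch (the file name is illustrative and the file is
+ * assumed to contain LidarScanStream messages):
+ * @code
+ * ouster::osf::Reader reader("recording.osf");
+ * for (const auto& msg : reader.messages()) {
+ *     if (msg.is<ouster::osf::LidarScanStream>()) {
+ *         auto scan = msg.decode_msg<ouster::osf::LidarScanStream>();
+ *         if (scan) {
+ *             // the decoded ouster::LidarScan is now usable (w, h, fields, ...)
+ *         }
+ *     }
+ * }
+ * @endcode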
+ * + * @param[in] osf_file The OsfFile object to use to read from. */ Reader(OsfFile& osf_file); /** * Creates reader from %OSF file name. + * + * @param[in] file The OSF file path to read from. */ Reader(const std::string& file); /** * Reads the messages from the first OSF chunk in sequental order * till the end. Doesn't support RandomAccess. + * + * @throws std::logic_error Exception on not having sensor_info. + * + * @return The MessageStreamingRange object to iterate + * through the messages. */ MessagesStreamingRange messages(); + /** + * @copydoc messages() + * @param[in] start_ts Specify the start of the timestamps that + * should be iterated through. + * @param[in] end_ts Specify the end of the timestamps that + * should be iterated through. + */ MessagesStreamingRange messages(const ts_t start_ts, const ts_t end_ts); + /** + * @copydoc messages() + * @param[in] stream_ids Filter the message iteration to specific streams. + */ MessagesStreamingRange messages(const std::vector& stream_ids); + /** + * @copydoc messages(const ts_t start_ts, const ts_t end_ts) + * @param[in] stream_ids Filter the message iteration to specific streams. + */ MessagesStreamingRange messages(const std::vector& stream_ids, const ts_t start_ts, const ts_t end_ts); @@ -249,11 +532,13 @@ class Reader { * Requires the OSF with message_counts inside, i.e. has_message_idx() * return ``True``, otherwise return value is always empty (nullopt). * - * @param stream_id[in] stream id on which the message_idx search is - * performed - * @param message_idx[in] the message index (i.e. rank/number) to search for + * @throws std::logic_error Exception on not having sensor_info. + * + * @param[in] stream_id stream id on which the message_idx search is + * performed + * @param[in] message_idx the message index (i.e. rank/number) to search for * @return message timestamp that corresponds to the message_idx in the - * stream_id + * stream_id */ nonstd::optional ts_by_message_idx(uint32_t stream_id, uint32_t message_idx); @@ -264,50 +549,112 @@ class Reader { * * Message counts was added a bit later to the OSF core * (ChunkInfo struct), so this function will be obsolete over time. + * + * @return Whether OSF contains the message counts that are needed for + * ``ts_by_message_idx()`` */ - bool has_message_idx() const { return chunks_.has_message_idx(); }; - - MessagesStandardRange messages_standard(); + bool has_message_idx() const; /** * Reads chunks and returns the iterator to valid chunks only. * NOTE: Every chunk is read in full and validated. (i.e. it's not just * iterator over chunks index) + * + * @return The iterator to valid chunks only. */ ChunksRange chunks(); - /** metadata id */ - std::string id() const; + /** + * Return the metadata id. + * + * @return The metadata id. + */ + std::string metadata_id() const; - /** metadata start ts */ + /** + * Return the lowest timestamp in the ChunksIter. + * + * @return The lowest timestamp in the ChunksIter. + */ ts_t start_ts() const; - /** metadata end ts */ + /** + * Return the highest timestamp in the ChunksIter. + * + * @return The highest timestamp in the ChunksIter. + */ ts_t end_ts() const; - /** Metadata store to get access to all metadata entries. */ - const MetadataStore& meta_store() const { return meta_store_; } + /** + * Return all metadata entries as a MetadataStore + * + * @return All of the metadata entries as a MetadataStore. + */ + const MetadataStore& meta_store() const; - /** if it can be read by stream and in non-decreasing timestamp order. 
*/ + /** + * If the chunks can be read by stream and in non-decreasing timestamp + * order. + * + * @return The chunks can be read by stream and timestamps are sane. + */ bool has_stream_info() const; private: + /** + * Read, parse and store all of the flatbuffer related metadata. + * + * @throws std::logic_error Exception on invalid metadata block. + */ void read_metadata(); - void read_chunks_info(); // i.e. StreamingInfo.chunks[] information - void print_metadata_entries(); + /** + * Verify, store and link all streaming info indicies + * i.e. StreamingInfo.chunks[] information + * + * @throws std::logic_error Exception on invalid chunk size. + */ + void read_chunks_info(); - // Checks the flatbuffers validity of a chunk by chunk offset. - bool verify_chunk(uint64_t chunk_offset); + /** + * Checks the flatbuffers validity of a chunk by chunk offset. + * + * @param[in] chunk_offset Specify the chunk to verify via offset. + * @return The validity of the chunk. + */ + bool verify_chunk(uint64_t chunk_offset); + /** + * Internal OsfFile object used to read the OSF file. + */ OsfFile file_; + /** + * Internal MetadataStore object to hold all of the + * metadata entries. + */ MetadataStore meta_store_{}; + /** + * Internal ChunksPile object to hold all of the + * chunks. + */ ChunksPile chunks_{}; - // absolute offset to the beginning of the chunks in a file. + /** + * Internal indicator of if this file has streaming info + */ + bool has_streaming_info_{false}; + + /** + * Absolute offset to the beginning of the chunks in a file. + */ uint64_t chunks_base_offset_{0}; + + /** + * Internal byte vector containing the raw flatbuffer + * metadata data. + */ std::vector metadata_buf_{}; // NOTE: These classes need an access to private member `chunks_` ... @@ -328,41 +675,82 @@ class MessageRef { /** * The only way to create the MessageRef is to point to the corresponding * byte buffer of the message in OSF file. - * @param meta_provider the metadata store that is used in types - * reconstruction + * + * @param[in] buf The buffer to use to make a MessageRef object. + * @param[in] meta_provider The metadata store that is used in types + * reconstruction */ - MessageRef(const uint8_t* buf, const MetadataStore& meta_provider) - : buf_(buf), meta_provider_(meta_provider), chunk_buf_{nullptr} {} + MessageRef(const uint8_t* buf, const MetadataStore& meta_provider); + /** + * The only way to create the MessageRef is to point to the corresponding + * byte buffer of the message in OSF file. + * + * @param[in] buf The buffer to use to make a MessageRef object. + * @param[in] meta_provider The metadata store that is used in types + * reconstruction + * @param[in,out] chunk_buf The pre-existing chunk buffer to use. + */ MessageRef(const uint8_t* buf, const MetadataStore& meta_provider, - std::shared_ptr> chunk_buf) - : buf_(buf), meta_provider_(meta_provider), chunk_buf_{chunk_buf} {} + std::shared_ptr> chunk_buf); - /** Message stream id */ + /** + * Get the message stream id. + * + * @return The message stream id. + */ uint32_t id() const; - /** Timestamp of the message */ + /** + * Get the timestamp of the message. + * + * @return The timestamp of the message. + */ ts_t ts() const; /// @todo [pb] Type of the stored data (meta of the stream?) // std::string stream_type() const; - /** Pointer to the underlying data */ - const uint8_t* buf() const { return buf_; } + /** + * Get the pointer to the underlying data. + * + * @return The pointer to the underlying data. 
+ */ + const uint8_t* buf() const; - /** Debug string representation */ + /** + * Debug string representation. + * + * @return The string representation of a MessageRef. + */ std::string to_string() const; - /** Checks whether the message belongs to the specified Stream type */ + /** + * Checks whether the message belongs to the specified Stream type. + * + * @tparam Stream The cpp data type to check against. + * @return If the current MessageRef is of type [Stream]. + */ template bool is() const { auto meta = meta_provider_.get(id()); return (meta != nullptr); } + /** + * Checks whether the message belongs to the specified Strean type. + * + * @param[in] type_str The data type in string form to check against. + * @return If the current MessageRef is of type type_str. + */ bool is(const std::string& type_str) const; - /** Reconstructs the underlying data to the class (copies data) */ + /** + * Reconstructs the underlying data to the class (copies data). + * + * @tparam Stream The type of the target data. + * @return A smart pointer to the new object. + */ template std::unique_ptr decode_msg() const { auto meta = meta_provider_.get(id()); @@ -375,15 +763,43 @@ class MessageRef { return Stream::decode_msg(buffer(), *meta, meta_provider_); } + /** + * Get the underlying raw message byte vector. + * + * @return Return the underlying raw message byte vector. + */ std::vector buffer() const; + /** + * Check if two MessageRefs are equal. + * + * @param[in] other The other MessageRef to check against. + * @return If the two MessageRefs are equal. + */ bool operator==(const MessageRef& other) const; + + /** + * Check if two MessageRefs are not equal. + * + * @param[in] other The other MessageRef to check against. + * @return If the two MessageRefs are not equal. + */ bool operator!=(const MessageRef& other) const; private: + /** + * The internal raw byte array. + */ const uint8_t* buf_; + + /** + * The internal store for all of the metadata entries. + */ const MetadataStore& meta_provider_; + /** + * The internal chunk buffer to use. + */ std::shared_ptr chunk_buf_; }; // MessageRef @@ -391,55 +807,174 @@ class MessageRef { * Thin interface class that holds the pointer to the chunk and hides the * messages reading routines. It expects that Chunk was "verified" before * creating a ChunkRef. - * */ class ChunkRef { public: + /** + * Default ChunkRef constructor that just zeros the internal fields. + */ ChunkRef(); + + /** + * @param[in] offset The offset into the chunk array for the specified + * chunk. + * @param[in] reader The reader object to use for reading. + */ ChunkRef(const uint64_t offset, Reader* reader); + /** + * Check if two ChunkRefs are equal. + * + * @param[in] other The other ChunkRef to check against. + * @return If the two ChunkRef are equal. + */ bool operator==(const ChunkRef& other) const; + + /** + * Check if two ChunkRefs are not equal. + * + * @param[in] other The other ChunkRef to check against. + * @return If the two ChunkRef are not equal. + */ bool operator!=(const ChunkRef& other) const; - ChunkState* state() { return reader_->chunks_.get(chunk_offset_); } - const ChunkState* state() const { - return reader_->chunks_.get(chunk_offset_); - } + /** + * Get the ChunkState for the chunk associated with this ChunkRef. + * + * @relates ChunkState + * + * @return The ChunkState associated with this ChunkRef. 
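+ *
+ * Chunk-level access sketch (the file name is illustrative; `reader` is an
+ * open Reader as documented above):
+ * @code
+ * ouster::osf::Reader reader("recording.osf");
+ * for (const auto chunk : reader.chunks()) {
+ *     if (!chunk.valid()) continue;
+ *     // chunk.start_ts() / chunk.end_ts() bound the message timestamps
+ *     for (const auto& msg : chunk) {
+ *         // msg.id(), msg.ts(), msg.buffer() ...
+ *     }
+ * }
+ * @endcode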
+ */ + ChunkState* state(); - ChunkInfoNode* info() { return reader_->chunks_.get_info(chunk_offset_); } - const ChunkInfoNode* info() const { - return reader_->chunks_.get_info(chunk_offset_); - } + /** + * @copydoc state() + */ + const ChunkState* state() const; + + /** + * Get the ChunkInfoNode for the chunk associated with this ChunkRef. + * + * @relates ChunkInfoNode + * + * @return The ChunkInfoNode associated with this ChunkRef. + */ + ChunkInfoNode* info(); + + /** + * @copydoc info() + */ + const ChunkInfoNode* info() const; + /** + * Begin function for std iterator support. + * + * @return A MessagesChunkIter object for iteration. + */ MessagesChunkIter begin() const; + + /** + * End function for std iterator support. + * + * @return A MessagesChunkIter object for signifying + * the end of iteration. + */ MessagesChunkIter end() const; + /** + * Get the message at a specific index. + * + * @todo Simplify this and any other instance of this + * + * @param[in] msg_idx The message index to get. + * @return The resulting message. + */ const MessageRef operator[](size_t msg_idx) const; + /** + * Get the message smart pointer at a specific index. + * + * @todo Simplify this and any other instance of this + * + * @param[in] msg_idx The message index to get. + * @return The resulting message smart pointer, + * returns nullptr if non existent. + */ std::unique_ptr messages(size_t msg_idx) const; - /** Debug string representation */ + /** + * Debug string representation. + * + * @return The string representation of a ChunkRef. + */ std::string to_string() const; - uint64_t offset() const { return chunk_offset_; } - ts_t start_ts() const { return state()->start_ts; } - ts_t end_ts() const { return state()->end_ts; } + /** + * Return the chunk offset in the larger flatbuffer array. + * + * @return The chunk offset in the larger flatbuffer array. + */ + uint64_t offset() const; + + /** + * The lowest timestamp in the chunk. + * A shortcut for state()->start_ts + * + * @relates state + */ + ts_t start_ts() const; + + /** + * The highest timestamp in the chunk. + * A shortcut for state()->end_ts + * + * @relates state + */ + ts_t end_ts() const; + /** + * Returns the summation of the sizes of the chunks messages + * + * @return The summation of the sizes of the chunks messages, + * 0 on chunk invalidity. + */ size_t size() const; + /** + * Get the validity of the chunk. + * + * @return The validity of the chunk. + */ bool valid() const; private: + /** + * Helper method to get the raw chunk pointer. + * Deals with the chunk being memory mapped or not. + * + * @return The raw chunk pointer. + */ const uint8_t* get_chunk_ptr() const; + /** + * The internal chunk offset variable. + */ uint64_t chunk_offset_; + + /** + * Reader object to use for reading. + */ Reader* reader_; + /** + * Chunk buffer to use for reading. + */ std::shared_ptr chunk_buf_; }; // ChunkRef /** - * Convenienv iterator over all messages in a chunk. + * Convenient iterator class to go over all of the + * messages in a chunk. */ struct MessagesChunkIter { using iterator_category = std::forward_iterator_tag; @@ -448,47 +983,179 @@ struct MessagesChunkIter { using pointer = const std::unique_ptr; using reference = const MessageRef&; + /** + * Default MessagesChunkIter constructor that just zeros + * the internal fields. + */ MessagesChunkIter(); + + /** + * Initialize the MessagesChunkIter from another + * MessageChunkIter object. + * + * @param[in] other The other MessagesChunkIter to initalize from. 
+ */ MessagesChunkIter(const MessagesChunkIter& other); + + /** + * Default assignment operation. + * + * @param[in] other The other MessageChunkIter to assign to. + */ MessagesChunkIter& operator=(const MessagesChunkIter& other) = default; + /** + * Gets the current ChunkRef via value. + * + * @return The current ChunkRef value. + */ const MessageRef operator*() const; + + /** + * Gets the current ChunkRef via smart pointer. + * + * @return The current ChunkRef smart pointer. + */ std::unique_ptr operator->() const; + + /** + * Advance to the next message in the chunk. + * + * @return *this + */ MessagesChunkIter& operator++(); + + /** + * @copydoc operator++() + */ MessagesChunkIter operator++(int); + + /** + * Regress to the previous message in the chunk. + * + * @return *this + */ MessagesChunkIter& operator--(); + + /** + * @copydoc operator--() + */ MessagesChunkIter operator--(int); + + /** + * Check if two MessagesChunkIter are equal. + * + * @param[in] other The other MessagesChunkIter to check against. + * @return If the two MessagesChunkIter are equal. + */ bool operator==(const MessagesChunkIter& other) const; + + /** + * Check if two MessagesChunkIter are not equal. + * + * @param[in] other The other MessagesChunkIter to check against. + * @return If the two MessagesChunkIter are not equal. + */ bool operator!=(const MessagesChunkIter& other) const; + /** + * Debug string representation. + * + * @return The string representation of a MessagesChunkIter. + */ std::string to_string() const; private: + /** + * Internal constructor for initializing a MessageChunkIter + * off of a ChunkRef and message index. + * + * @param[in] chunk_ref The ChunkRef to use when initializing. + * @param[in] msg_idx The message index in the ChunkRef + * to use when initializing. + */ MessagesChunkIter(const ChunkRef chunk_ref, const size_t msg_idx); + + /** + * Advance to the next message in the chunk. + */ void next(); + + /** + * Regress to the previous message in the chunk. + */ void prev(); + /** + * The internal ChunkRef var. + */ ChunkRef chunk_ref_; + + /** + * The current message index. + */ size_t msg_idx_; + friend class ChunkRef; }; // MessagesChunkIter class MessagesStreamingRange { public: + /** + * Begin function for std iterator support. + * + * @return A MessagesStreamingIter object for iteration. + */ MessagesStreamingIter begin() const; + + /** + * End function for std iterator support. + * + * @return A MessagesStreamingIter object for signifying + * the end of iteration. + */ MessagesStreamingIter end() const; + /** + * Debug string representation. + * + * @return The string representation of a MessagesStreamingRange. + */ std::string to_string() const; private: - // using range [start_ts, end_ts] <---- not inclusive .... !!! + /** + * Initialize from Reader on a set of filters.. + * //using range [start_ts, end_ts] <---- not inclusive .... !!! + * + * @param[in] start_ts The lowest timestamp to start with. + * @param[in] end_ts The highest timestamp to end with. + * @param[in] stream_ids The stream indicies to use with the streaming + * range. + * @param[in] reader The reader object to use for reading the OSF file. + */ MessagesStreamingRange(const ts_t start_ts, const ts_t end_ts, const std::vector& stream_ids, Reader* reader); + /** + * The lowest timestamp for the range. + */ ts_t start_ts_; + + /** + * The highest timestamp for the range. + */ ts_t end_ts_; + + /** + * The set of stream indicies in the range. 
+ */ std::vector stream_ids_; + + /** + * The reader object to use to read the OSF file. + */ Reader* reader_; friend class Reader; }; // MessagesStreamingRange @@ -506,41 +1173,144 @@ struct MessagesStreamingIter { using opened_chunk_type = std::pair; + /** + * Comparison struct used for determining which chunk is greater. + */ struct greater_chunk_type { - bool operator()(const opened_chunk_type& a, - const opened_chunk_type& b) { - return a.first[a.second].ts() > b.first[b.second].ts(); - } + /** + * Comparison operator used for determining if the first is greater + * than the second. The comparison is based on the timestamps. + * + * @param[in] a The first chunk to compare. + * @param[in] b The second chunk to compare. + * @return If the first chunk is greater than the second chunk. + */ + bool operator()(const opened_chunk_type& a, const opened_chunk_type& b); }; + /** + * Default MessagesStreamingIter constructor that just zeros + * the internal fields. + */ MessagesStreamingIter(); + + /** + * Initialize the MessagesStreamingIter from another + * MessagesStreamingIter object. + * + * @param[in] other The other MessagesStreamingIter to initalize from. + */ MessagesStreamingIter(const MessagesStreamingIter& other); + + /** + * Default assignment operation. + * + * @param[in] other The other MessagesStreamingIter to assign to. + */ MessagesStreamingIter& operator=(const MessagesStreamingIter& other) = default; + /** + * Gets the current MessageRef via value. + * + * @return The current MessageRef value. + */ const MessageRef operator*() const; + + /** + * Gets the current MessageRef via smart pointer. + * + * @return The current MessageRef smart pointer. + */ std::unique_ptr operator->() const; + + /** + * Advance to the next message. + * + * @return *this + */ MessagesStreamingIter& operator++(); + + /** + * @copydoc operator++() + */ MessagesStreamingIter operator++(int); + + /** + * Check if two MessagesStreamingIter are equal. + * + * @param[in] other The other MessagesStreamingIter to check against. + * @return If the two MessagesStreamingIter are equal. + */ bool operator==(const MessagesStreamingIter& other) const; + + /** + * Check if two MessagesStreamingIter are not equal. + * + * @param[in] other The other MessagesStreamingIter to check against. + * @return If the two MessagesStreamingIter are not equal. + */ bool operator!=(const MessagesStreamingIter& other) const; + /** + * Debug string representation. + * + * @return The string representation of a MessagesStreamingIter. + */ std::string to_string() const; - void print_and_finish(); - private: - // using range [start_ts, end_ts) <---- not inclusive .... !!! + /** + * Initialize from Reader on a set of filters.. + * //using range [start_ts, end_ts] <---- not inclusive .... !!! + * + * @param[in] start_ts The lowest timestamp to start with. + * @param[in] end_ts The highest timestamp to end with. + * @param[in] stream_ids The stream indicies to use with the streaming + * range. + * @param[in] reader The reader object to use for reading the OSF file. + */ MessagesStreamingIter(const ts_t start_ts, const ts_t end_ts, const std::vector& stream_ids, Reader* reader); + + /** + * Advance to the next message. + */ void next(); + /** + * The current timestamp. + */ ts_t curr_ts_; + + /** + * The last timestamp. + */ ts_t end_ts_; + + /** + * The streams to iterate on. + */ std::vector stream_ids_; + + /** + * Used to hash the set of stream indexes. + * + * @todo Look at possibly removing this. 
+ */ uint32_t stream_ids_hash_; + + /** + * The reader object used to read the OSF file. + */ Reader* reader_; + + /** + * Priority queue used to hold the chunks in timestamp order. + * + * @relates greater_chunk_type + */ std::priority_queue, greater_chunk_type> curr_chunks_{}; diff --git a/ouster_osf/include/ouster/osf/stream_lidar_scan.h b/ouster_osf/include/ouster/osf/stream_lidar_scan.h index d220c3cc..cf393138 100644 --- a/ouster_osf/include/ouster/osf/stream_lidar_scan.h +++ b/ouster_osf/include/ouster/osf/stream_lidar_scan.h @@ -16,67 +16,113 @@ namespace ouster { namespace osf { -// Cast `ls_src` LidarScan to a subset of fields with possible different -// underlying ChanFieldTypes. -// @return a copy of `ls_src` with transformed fields +/** + * Cast `ls_src` LidarScan to a subset of fields with possible different + * underlying ChanFieldTypes. + * + * @throws std::logic_error Exception on trying to slice a scan with only + * a subset of the requested scans + * + * @param[in] ls_src The LidarScan to cast. + * @param[in] field_types The field types to cast the LidarScan to. + * @return a copy of `ls_src` with transformed fields. + */ LidarScan slice_with_cast(const LidarScan& ls_src, const LidarScanFieldTypes& field_types); -// Zeros field -struct zero_field { - template - void operator()(Eigen::Ref> field_dest) { - field_dest.setZero(); - } -}; - /** * Metadata entry for LidarScanStream to store reference to a sensor and * field_types * - * @verbatim - * Fields: - * sensor_meta_id: metadata_ref - reference to LidarSensor metadata that - * describes the sensor configuration. - * field_types: LidarScan fields specs - * * OSF type: * ouster/v1/os_sensor/LidarScanStream * - * Flatbuffer definition file: + * Flat Buffer Reference: * fb/os_sensor/lidar_scan_stream.fbs - * @endverbatim - * */ class LidarScanStreamMeta : public MetadataEntryHelper { public: + /** + * @param[in] sensor_meta_id Reference to LidarSensor metadata that + * describes the sensor configuration. + * @param[in] field_types LidarScan fields specs, this argument is optional. + */ LidarScanStreamMeta(const uint32_t sensor_meta_id, - const LidarScanFieldTypes field_types = {}) - : sensor_meta_id_{sensor_meta_id}, - field_types_{field_types.begin(), field_types.end()} {} + const LidarScanFieldTypes field_types = {}); - uint32_t sensor_meta_id() const { return sensor_meta_id_; } - - const LidarScanFieldTypes& field_types() const { return field_types_; } + /** + * Return the sensor meta id. + * + * @return The sensor meta id. + */ + uint32_t sensor_meta_id() const; - // Simplified with MetadataEntryHelper: type()+clone() - // std::string type() const override; - // std::unique_ptr clone() const override; + /** + * Return the field types. + * + * @return The field types. + */ + const LidarScanFieldTypes& field_types() const; + /** + * @copydoc MetadataEntry::buffer + */ std::vector buffer() const final; + /** + * Create a LidarScanStreamMeta object from a byte array. + * + * @todo Figure out why this wasnt just done as a constructor overload. + * + * @relates MetadataEntry::from_buffer + * + * @param[in] buf The raw flatbuffer byte vector to initialize from. + * @return The new LidarScanStreamMeta cast as a MetadataEntry + */ static std::unique_ptr from_buffer( const std::vector& buf); + /** + * Get the string representation for the LidarScanStreamMeta object. + * + * @relates MetadataEntry::repr + * + * @return The string representation for the LidarScanStreamMeta object. 
+ */ std::string repr() const override; private: + /** + * Internal store of the sensor id. + * + * Flat Buffer Reference: + * fb/os_sensor/lidar_scan_stream.fbs :: LidarScanStream :: sensor_id + */ uint32_t sensor_meta_id_{0}; + + /** + * Internal store of the field types. + * + * Flat Buffer Reference: + * fb/os_sensor/lidar_scan_stream.fbs :: LidarScanStream :: field_types + */ LidarScanFieldTypes field_types_; }; +/** @defgroup OSFTraitsLidarScanStreamMeta Templated struct for traits.*/ + +/** + * Templated struct for returning the OSF type string. + * + * @ingroup OSFTraitsLidarScanStreamMeta + */ template <> struct MetadataTraits { + /** + * Return the OSF type string. + * + * @return The OSF type string "ouster/v1/os_sensor/LidarScanStream". + */ static const std::string type() { return "ouster/v1/os_sensor/LidarScanStream"; } @@ -85,19 +131,18 @@ struct MetadataTraits { /** * LidarScanStream that encodes LidarScan objects into the messages. * - * @verbatim * Object type: ouster::sensor::LidarScan * Meta type: LidarScanStreamMeta (sensor_meta_id, field_types) * * Flatbuffer definition file: * fb/os_sensor/lidar_scan_stream.fbs - * @endverbatim - * */ class LidarScanStream : public MessageStream { - public: - LidarScanStream(Writer& writer, const uint32_t sensor_meta_id, - const LidarScanFieldTypes& field_types = {}); + friend class Writer; + friend class MessageRef; + + // Access key pattern used to only allow friends to call our constructor + struct Token {}; /** * Saves the object to the writer applying the coding/serizlization @@ -107,33 +152,79 @@ class LidarScanStream : public MessageStream { * @todo [pb]: Probably should be abstracted/extracted from all streams * we also might want to have the corresponding function to read back * sequentially from Stream that doesn't seem like fit into this model... + * + * @param[in] ts The timestamp to use for the lidar scan. + * @param[in] lidar_scan The lidar scan to write. */ void save(const ouster::osf::ts_t ts, const obj_type& lidar_scan); - /** Encode/serialize the object to the buffer of bytes */ + /** + * Encode/serialize the object to the buffer of bytes. + * + * @param[in] lidar_scan The lidar scan to turn into a vector of bytes. + * @return The byte vector representation of lidar_scan. + */ std::vector make_msg(const obj_type& lidar_scan); /** * Decode/deserialize the object from bytes buffer using the concrete * metadata type for the stream. - * metadata_provider is used to reconstruct any references to other - * metadata entries dependencies (like sensor_meta_id) + * + * @param[in] buf The buffer to decode into an object. + * @param[in] meta The concrete metadata type to use for decoding. + * @param[in] meta_provider Used to reconstruct any references to other + * metadata entries dependencies + * (like sensor_meta_id) + * @return Pointer to the decoded object. */ static std::unique_ptr decode_msg( const std::vector& buf, const meta_type& meta, const MetadataStore& meta_provider); - const meta_type& meta() const { return meta_; } + public: + /** + * @param[in] key Private class used to prevent non-friends from calling + * this. + * @param[in] writer The writer object to use to write messages out. + * @param[in] sensor_meta_id The sensor to use. + * @param[in] field_types LidarScan fields specs, this argument is optional. + */ + LidarScanStream(Token key, Writer& writer, const uint32_t sensor_meta_id, + const LidarScanFieldTypes& field_types = {}); + + /** + * Return the concrete metadata type. + * This has templated types. 
+ * + * @return The concrete metadata type. + */ + + const meta_type& meta() const { return meta_; }; private: + /** + * The internal writer object to use to write messages out. + */ Writer& writer_; + /** + * The internal concrete metadata type. + */ meta_type meta_; + /** + * The internal flatbuffer id for the stream. + */ uint32_t stream_meta_id_{0}; + /** + * The internal flatbuffer id for the metadata. + */ uint32_t sensor_meta_id_{0}; + /** + * The internal sensor_info data. + */ sensor::sensor_info sensor_info_; }; diff --git a/ouster_osf/include/ouster/osf/writer.h b/ouster_osf/include/ouster/osf/writer.h index a00afdbc..dc97c662 100644 --- a/ouster_osf/include/ouster/osf/writer.h +++ b/ouster_osf/include/ouster/osf/writer.h @@ -4,7 +4,6 @@ * * @file writer.h * @brief OSF file Writer - * */ #pragma once @@ -16,63 +15,159 @@ namespace ouster { namespace osf { +class LidarScanStream; + /** * Chunks writing strategy that decides when and how exactly write chunks * to a file. See RFC 0018 for Standard and Streaming Layout description. */ class ChunksWriter { public: - virtual void saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& buf) = 0; + /** + * Save a message to a specified stream. + * + * @param[in] stream_id The stream id to associate with the message. + * @param[in] ts The timestamp for the messages. + * @param[in] buf A vector of message buffers to record. + */ + virtual void save_message(const uint32_t stream_id, const ts_t ts, + const std::vector& buf) = 0; + + /** + * Finish the process of saving messages and write out the stream stats. + */ virtual void finish() = 0; + + /** + * Get the chunksize + * + * @return the chunk size + */ virtual uint32_t chunk_size() const = 0; + + /** + * Default deconstructor. + */ virtual ~ChunksWriter() = default; }; /** * %OSF Writer provides the base universal interface to store the collection * of metadata entries, streams and corresponding objects. - * - * Examples: - * @ref writer_test.cpp, writer_custom_test.cpp - * */ class Writer { + friend class StreamingLayoutCW; + public: - explicit Writer(const std::string& file_name); + /** + * @throws std::runtime_error Exception on file writing issues. + * + * @param[in] file_name The filename of the output OSF file. + * @param[in] chunk_size The chunk size in bytes to use for the OSF file. + * This argument is optional, and if not provided the default value of + * 2MB is used. If the current chunk being written exceeds + * the chunk_size, a new chunk will be started on the next call to + * save. This allows an application to tune the number of messages (e.g. + * lidar scans) per chunk, which affects the granularity of the message + * index stored in the StreamingInfo in the file metadata. A smaller + * chunk_size means more messages are indexed and a larger number of + * index entries. A more granular index allows for more precise + * seeking at the slight expense of a larger file. + */ + Writer(const std::string& file_name, uint32_t chunk_size = 0); + + /** + * @param[in] filename The filename to output to. + * @param[in] info The sensor info to use for a single stream OSF file. + * @param[in] chunk_size The chunksize to use for the OSF file, this + * parameter is optional. + * @param[in] field_types The fields from scans to actually save into the + * OSF. If not provided uses the fields from the + * first saved lidar scan for each stream. This + * parameter is optional. 
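+     *
+     * A brief usage sketch in the style of the examples below; the
+     * filename variable and RANDOM_SCAN_HERE are placeholders:
+     @code{.cpp}
+     sensor_info info; // Metadata for the single sensor in this OSF file
+
+     Writer output = Writer(filename, info);
+
+     LidarScan scan = RANDOM_SCAN_HERE;
+     output.save(0, scan); // stream_index 0 refers to `info`
+     output.close();
+     @endcode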
+ */ + Writer(const std::string& filename, const ouster::sensor::sensor_info& info, + const LidarScanFieldTypes& field_types = LidarScanFieldTypes(), + uint32_t chunk_size = 0); - Writer(const std::string& file_name, const std::string& metadata_id, + /** + * @param[in] filename The filename to output to. + * @param[in] info The sensor info vector to use for a multi stream OSF + * file. + * @param[in] chunk_size The chunksize to use for the OSF file, this + * parameter is optional. + * @param[in] field_types The fields from scans to actually save into the + * OSF. If not provided uses the fields from the + * first saved lidar scan for each stream. This + * parameter is optional. + */ + Writer(const std::string& filename, + const std::vector& info, + const LidarScanFieldTypes& field_types = LidarScanFieldTypes(), uint32_t chunk_size = 0); + /** + * Add metadata to the OSF file. + * + * @tparam MetaType The type of metadata to add. + * @tparam MetaParams The type of meta parameters to add. + * + * @param[in] params The parameters to add. + */ template - uint32_t addMetadata(MetaParams&&... params) { + uint32_t add_metadata(MetaParams&&... params) { MetaType entry(std::forward(params)...); return meta_store_.add(entry); } - uint32_t addMetadata(MetadataEntry&& entry) { return addMetadata(entry); } + /** + * Adds a MetadataEntry to the OSF file. + * + * @param[in] entry The metadata entry to add to the OSF file. + */ + uint32_t add_metadata(MetadataEntry&& entry); - uint32_t addMetadata(MetadataEntry& entry) { - return meta_store_.add(entry); - } + /** + * @copydoc add_metadata(MetadataEntry&& entry) + */ + uint32_t add_metadata(MetadataEntry& entry); + + /** + * @defgroup OSFGetMetadataGroup Get specified metadata. + * Get and return a metadata entry. + * + * @param[in] metadata_id The id of the metadata to get and return. + * @return The correct MetadataEntry. + */ + + /** + * @copydoc OSFGetMetadataGroup + */ + std::shared_ptr get_metadata( + const uint32_t metadata_id) const; + /** + * @copydoc OSFGetMetadataGroup + * + * @tparam MetadataEntryClass The type of metadata to get and return. + */ template - std::shared_ptr getMetadata( + std::shared_ptr get_metadata( const uint32_t metadata_id) const { return meta_store_.get(metadata_id); } - std::shared_ptr getMetadata( - const uint32_t metadata_id) const { - return meta_store_.get(metadata_id); - } - /** * Creating streams by passing itself as first argument of the ctor and * following the all other parameters. + * + * @tparam Stream The specified stream object type. + * @tparam StreamParams The specified stream parameter types. + * + * @param[in] params The parameters to use when creating a stream. */ template - Stream createStream(StreamParams&&... params) { + Stream create_stream(StreamParams&&... params) { return Stream(*this, std::forward(params)...); } @@ -81,86 +176,467 @@ class Writer { * and timestamp. * @todo [pb]: It should be hidden into private/protected, but I don't see * yet how to do it and give an access to every derived Stream objects. + * + * @throws std::logic_error Exception on non existent stream id. + * + * @param[in] stream_id The stream to save the message to. + * @param[in] ts The timestamp to use for the message. + * @param[in] buf The message to save in the form of a byte vector. 
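+     *
+     * A hypothetical sketch (the `output` writer, stream id, timestamp and
+     * SERIALIZED_MESSAGE_HERE payload are placeholders; stream objects
+     * normally call this on your behalf):
+     @code{.cpp}
+     std::vector<uint8_t> payload = SERIALIZED_MESSAGE_HERE;
+     output.save_message(stream_id, ouster::osf::ts_t{0}, payload);
+     @endcode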
     */
-    void saveMessage(const uint32_t stream_id, const ts_t ts,
-                     const std::vector<uint8_t>& buf);
+    void save_message(const uint32_t stream_id, const ts_t ts,
+                      const std::vector<uint8_t>& buf);
 
-    const MetadataStore& meta_store() const { return meta_store_; }
+    /**
+     * Adds info about a sensor to the OSF and returns the stream index
+     * used to write scans to its stream.
+     *
+     * @param[in] info The info of the sensor to add to the file.
+     * @param[in] field_types The fields from scans to actually save into the
+     *                        OSF. If not provided, uses the fields from the
+     *                        first saved lidar scan for this sensor. This
+     *                        parameter is optional.
+     *
+     * @return The stream index for the newly added sensor.
+     */
+    uint32_t add_sensor(
+        const ouster::sensor::sensor_info& info,
+        const LidarScanFieldTypes& field_types = LidarScanFieldTypes());
 
-    const std::string& metadata_id() const { return metadata_id_; }
-    void setMetadataId(const std::string& id) { metadata_id_ = id; }
+    /**
+     * Save a single scan to the specified stream_index in an OSF file.
+     * The concept of the stream_index is related to the sensor_info vector.
+     * Consider the following:
+     @code{.cpp}
+     sensor_info info1; // The first sensor in this OSF file
+     sensor_info info2; // The second sensor in this OSF file
+     sensor_info info3; // The third sensor in this OSF file
+
+     Writer output = Writer(filename, {info1, info2, info3});
+
+     LidarScan scan = RANDOM_SCAN_HERE;
+
+     // To save the LidarScan of scan to the first sensor, you would do the
+     // following
+     output.save(0, scan);
+
+     // To save the LidarScan of scan to the second sensor, you would do the
+     // following
+     output.save(1, scan);
+
+     // To save the LidarScan of scan to the third sensor, you would do the
+     // following
+     output.save(2, scan);
+     @endcode
+     *
+     * @throws std::logic_error Will throw an exception if the writer has
+     *                          been closed.
+     * @throws std::logic_error Will throw an exception on an out of bound
+     *                          stream_index.
+     *
+     * @param[in] stream_index The index of the corresponding sensor_info to
+     *                         use.
+     * @param[in] scan The scan to save.
+     */
+    void save(uint32_t stream_index, const LidarScan& scan);
 
-    const std::string& filename() const { return file_name_; }
+    /**
+     * Save a single scan to the specified stream_index in an OSF file indexed
+     * with the provided timestamp.
+     *
+     * @throws std::logic_error Will throw an exception if the writer has
+     *                          been closed.
+     * @throws std::logic_error Will throw an exception on an out of bound
+     *                          stream_index.
+     *
+     * @param[in] stream_index The index of the corresponding sensor_info to
+     *                         use.
+     * @param[in] scan The scan to save.
+     * @param[in] timestamp Timestamp to index this scan with.
+     */
+    void save(uint32_t stream_index, const LidarScan& scan,
+              const ouster::osf::ts_t timestamp);
 
-    ChunksLayout chunks_layout() const { return chunks_layout_; }
+    /**
+     * Save multiple scans in an OSF file.
+     * The concept of the stream_index is related to the sensor_info vector.
+ * Consider the following: + @code{.cpp} + sensor_info info1; // The first sensor in this OSF file + sensor_info info2; // The second sensor in this OSF file + sensor_info info3; // The third sensor in this OSF file + + Writer output = Writer(filename, {info1, info2, info3}); + + LidarScan sensor1_scan = RANDOM_SCAN_HERE; + LidarScan sensor2_scan = RANDOM_SCAN_HERE; + LidarScan sensor3_scan = RANDOM_SCAN_HERE; + + // To save the scans matched appropriately to their sensors, you would do + // the following + output.save({sensor1_scan, sensor2_scan, sensor3_scan}); + @endcode + * + * + * @throws std::logic_error Will throw exception on writer being closed + * + * @param[in] scans The vector of scans to save. + */ + void save(const std::vector& scans); + + /** + * Returns the metadata store. This is used for getting the entire + * set of flatbuffer metadata entries. + * + * @return The flatbuffer metadata entries. + */ + const MetadataStore& meta_store() const; + + /** + * Returns the metadata id label. + * + * @return The metadata id label. + */ + const std::string& metadata_id() const; + + /** + * Sets the metadata id label. + * + * @param[in] id The metadata id label to set. + */ + void set_metadata_id(const std::string& id); + + /** + * Return the filename for the OSF file. + * + * @return The filename for the OSF file. + */ + const std::string& filename() const; + + /** + * Get the specific chunks layout of the OSF file. + * + * @relates ChunksLayout + * + * @return The chunks layout of the OSF file. + */ + ChunksLayout chunks_layout() const; + + /** + * Get the chunk size used for the OSF file. + * + * @return The chunk size for the OSF file. + */ uint32_t chunk_size() const; - // writes buf to the file with CRC32 appended and return the number of - // bytes writen to the file - uint64_t append(const uint8_t* buf, const uint64_t size); + /** + * Return the sensor info vector. + * Consider the following: + @code{.cpp} + sensor_info info1; // The first sensor in this OSF file + sensor_info info2; // The second sensor in this OSF file + sensor_info info3; // The third sensor in this OSF file + + Writer output = Writer(filename, {info1, info2, info3}); + + // The following will be true + output.sensor_info() == std::vector{info1, info2, info3}; + @endcode + * + * @return The sensor info vector. + */ + const std::vector& sensor_info() const; - uint64_t emit_chunk(const ts_t start_ts, const ts_t end_ts, - const std::vector& chunk_buf); + /** + * Get the specified sensor info + * Consider the following: + @code{.cpp} + sensor_info info1; // The first sensor in this OSF file + sensor_info info2; // The second sensor in this OSF file + sensor_info info3; // The third sensor in this OSF file + + Writer output = Writer(filename, {info1, info2, info3}); + + // The following will be true + output.sensor_info(0) == info1; + output.sensor_info(1) == info2; + output.sensor_info(2) == info3; + @endcode + * + * @param[in] stream_index The sensor info to return. + * @return The correct sensor info. + */ + const ouster::sensor::sensor_info sensor_info(int stream_index) const; + + /** + * Get the number of sensor_info objects. + * + * @return The sensor_info count. + */ + uint32_t sensor_info_count() const; - /** Finish file with a proper metadata object, and header */ + /** + * Finish file with a proper metadata object, and header. + */ void close(); + /** + * Returns if the writer is closed or not. + * + * @return If the writer is closed or not. 
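+     *
+     * For example, assuming a previously constructed `output` writer:
+     @code{.cpp}
+     output.close();
+
+     // The following will be true
+     output.is_closed() == true;
+     @endcode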
+ */ + inline bool is_closed() const { return finished_; } + + /** + * @relates close + */ ~Writer(); - // copy/move = delete everything + /** + * Disallow copying and moving. + */ Writer(const Writer&) = delete; + + /** + * Disallow copying and moving. + */ Writer& operator=(const Writer&) = delete; + + /** + * Disallow copying and moving. + */ Writer(Writer&&) = delete; + + /** + * Disallow copying and moving. + */ Writer& operator=(Writer&&) = delete; private: - // helper to construct the Metadata OSF Block at the end of writing + /** + * Helper to construct the Metadata OSF Block at the end of writing. + * This function takes the metadata entries from the metadata store + * and generates a raw flatbuffer blob for writing to file. + * + * @return The completed raw flatbuffer byte vector for + * the metadata section. + */ std::vector make_metadata() const; + /** + * Internal method used to save a scan to a specified stream_index + * specified stream. This method is here so that we can bypass + * is_closed checking for speed sake. The calling functions will + * do the check for us. + * + * @param[in] stream_index The stream to save to. + * @param[in] scan The scan to save. + * @param[in] time Timestamp to use to index scan. + */ + void _save(uint32_t stream_index, const LidarScan& scan, const ts_t time); + + /** + * Writes buf to the file with CRC32 appended and return the number of + * bytes writen to the file + * + * @throws std::logic_error Exception on bad file position. + * @throws std::logic_error Exception on a closed writer object. + * + * @param[in] buf The buffer to append. + * @param[in] size The size of the buffer to append. + * @return The number of bytes writen to the OSF file. + */ + uint64_t append(const uint8_t* buf, const uint64_t size); + + /** + * Save a specified chunk to the OSF file. + * + * @throws std::logic_error Exception on a size mismatch + * + * @param[in] start_ts The lowest timestamp in the chunk. + * @param[in] end_ts The highest timestamp in the chunk. + * @param[in] chunk_buf The byte vector representation of the chunk. + * @return The result offset in the OSF file. + */ + uint64_t emit_chunk(const ts_t start_ts, const ts_t end_ts, + const std::vector& chunk_buf); + + /** + * Internal filename of the OSF file. + */ std::string file_name_; + /** + * The size of the flatbuffer header blob. + */ uint32_t header_size_{0}; + + /** + * The internal file offset. + */ int64_t pos_{-1}; + + /** + * Internal status flag for whether we have started writing or not. + */ bool started_{false}; + + /** + * Internal status flag for whether the file has been closed or not. + * + * @relates close + */ bool finished_{false}; + /** + * The internal vector of chunks. + */ std::vector chunks_{}; + + /** + * The lowest timestamp in the OSF file. + */ ts_t start_ts_{ts_t::max()}; + + /** + * The highest timestamp in the OSF file. + */ ts_t end_ts_{ts_t::min()}; + + /** + * Cache of the chunk offset. + */ uint64_t next_chunk_offset_{0}; + /** + * The metadata id label. + */ std::string metadata_id_{}; + /** + * The internal chunk layout of the OSF file. + */ ChunksLayout chunks_layout_{ChunksLayout::LAYOUT_STANDARD}; + /** + * The store of metadata entries. + */ MetadataStore meta_store_{}; - // ChunksWriter is reponsible for chunking strategy + /** + * ChunksWriter is reponsible for chunking strategy. 
+     */
     std::shared_ptr<ChunksWriter> chunks_writer_{nullptr};
+
+    /**
+     * Internal store of field types to serialize for lidar scans.
+     */
+    std::vector<LidarScanFieldTypes> field_types_;
+
+    /**
+     * Internal stream index to metadata map.
+     */
+    std::map<uint32_t, uint32_t> lidar_meta_id_;
+
+    /**
+     * Internal stream index to LidarScanStream map.
+     */
+    std::map<uint32_t, std::unique_ptr<LidarScanStream>>
+        lidar_streams_;
+
+    /**
+     * The internal sensor_info store ordered by stream_index.
+     */
+    std::vector<ouster::sensor::sensor_info> sensor_info_;
 };
 
+/**
+ * Encapsulates chunk serialization operations.
+ */
 class ChunkBuilder {
    public:
     ChunkBuilder(){};
-    void saveMessage(const uint32_t stream_id, const ts_t ts,
-                     const std::vector<uint8_t>& msg_buf);
+
+    /**
+     * Save messages to the serialized chunks.
+     *
+     * @throws std::logic_error Exception on a size mismatch.
+     *
+     * @param[in] stream_id The stream to save the message to.
+     * @param[in] ts The timestamp to use for the message.
+     * @param[in] msg_buf The message to save in the form of a byte vector.
+     */
+    void save_message(const uint32_t stream_id, const ts_t ts,
+                      const std::vector<uint8_t>& msg_buf);
+
+    /**
+     * Completely wipe all data and start the chunk anew.
+     */
     void reset();
+
+    /**
+     * Finish out the serialization of the chunk and return the raw
+     * flatbuffer output.
+     *
+     * @return The serialized chunk in a raw flatbuffer byte vector.
+     */
     std::vector<uint8_t> finish();
+
+    /**
+     * Returns the FlatBufferBuilder size.
+     *
+     * @return The FlatBufferBuilder size.
+     */
     uint32_t size() const;
+
+    /**
+     * Returns the number of messages saved so far.
+     *
+     * @return The number of messages saved so far.
+     */
     uint32_t messages_count() const;
-    ts_t start_ts() const { return start_ts_; }
-    ts_t end_ts() const { return end_ts_; }
+
+    /**
+     * The lowest timestamp in the chunk.
+     */
+    ts_t start_ts() const;
+
+    /**
+     * The highest timestamp in the chunk.
+     */
+    ts_t end_ts() const;
 
    private:
+    /**
+     * Internal method for updating the stored start and end
+     * timestamps.
+     *
+     * @param[in] ts The timestamp to check against for start and end.
+     */
     void update_start_end(const ts_t ts);
+    /**
+     * Internal status flag for whether the builder is finished or not.
+     */
     bool finished_{false};
+    /**
+     * Internal FlatBufferBuilder object used for the serialization.
+     */
     flatbuffers::FlatBufferBuilder fbb_{0x7fff};
+
+    /**
+     * The lowest timestamp in the chunk.
+     */
     ts_t start_ts_{ts_t::max()};
+
+    /**
+     * The highest timestamp in the chunk.
+ */ ts_t end_ts_{ts_t::min()}; + + /** + * Internal store of messages to be contained within the chunk + */ std::vector> messages_{}; }; } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/basics.cpp b/ouster_osf/src/basics.cpp index 92aa3a84..9d8c69ac 100644 --- a/ouster_osf/src/basics.cpp +++ b/ouster_osf/src/basics.cpp @@ -141,5 +141,43 @@ bool check_prefixed_size_block_crc(const uint8_t* buf, return res; } +std::function make_build_ls( + const ouster::sensor::sensor_info& info, + const LidarScanFieldTypes& ls_field_types, + std::function handler) { + const auto w = info.format.columns_per_frame; + const auto h = info.format.pixels_per_column; + auto temp_ls_field_types = ls_field_types; + std::shared_ptr ls(nullptr); + if (temp_ls_field_types.empty()) { + temp_ls_field_types = get_field_types(info); + } + ls = std::make_shared(w, h, temp_ls_field_types.begin(), + temp_ls_field_types.end()); + + auto pf = ouster::sensor::get_format(info); + auto build_ls_imp = ScanBatcher(w, pf); + osf::ts_t first_msg_ts{-1}; + return [handler, build_ls_imp, ls, first_msg_ts]( + const osf::ts_t msg_ts, const uint8_t* buf) mutable { + if (first_msg_ts == osf::ts_t{-1}) { + first_msg_ts = msg_ts; + } + if (build_ls_imp(buf, *ls)) { + handler(first_msg_ts, *ls); + // At this point we've just started accumulating new LidarScan, so + // we are saving the msg_ts (i.e. timestamp of a UDP packet) + // which contained the first lidar_packet + first_msg_ts = msg_ts; + } + }; +} + +std::function make_build_ls( + const ouster::sensor::sensor_info& info, + std::function handler) { + return make_build_ls(info, {}, handler); +} + } // namespace osf } // namespace ouster diff --git a/ouster_osf/src/compat_ops.cpp b/ouster_osf/src/compat_ops.cpp index d12cb309..0e316cfd 100644 --- a/ouster_osf/src/compat_ops.cpp +++ b/ouster_osf/src/compat_ops.cpp @@ -5,11 +5,14 @@ #include "compat_ops.h" +#include #include #include #ifdef _WIN32 #include +#include +#include #include #include #include @@ -302,5 +305,101 @@ bool mmap_close(uint8_t* file_buf, const uint64_t file_size) { #endif } +int64_t truncate_file(const std::string& path, uint64_t filesize) { + int64_t actual_file_size = file_size(path); + if (actual_file_size < (int64_t)filesize) { + return -1; + } +#ifdef _WIN32 + int file_handle; + if (file_handle = _sopen(path.c_str(), _O_RDWR, _SH_DENYRW)) { + _chsize(file_handle, filesize); + _close(file_handle); + } +#else + truncate(path.c_str(), filesize); +#endif + return file_size(path); +} + +int64_t append_binary_file(const std::string& append_to_file_name, + const std::string& append_from_file_name) { + int64_t saved_size = -1; + + std::fstream append_to_file_stream; + std::fstream append_from_file_stream; + + // clang-format off + // There something seriously wrong with the clang formatting + // here. 
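+    // Open the destination for binary append and the source for binary
+    // reading, then stream one into the other below.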
+ append_to_file_stream.open(append_to_file_name, std::fstream::out | + std::fstream::app | std::fstream::binary); + append_from_file_stream.open(append_from_file_name, + std::fstream::in | std::fstream::binary); + // clang-format on + + if (append_to_file_stream.is_open()) { + if (append_from_file_stream.is_open()) { + append_from_file_stream.seekg(0, std::ios::end); + uint64_t from_file_size = append_from_file_stream.tellg(); + append_from_file_stream.seekg(0, std::ios::beg); + + append_to_file_stream.seekg(0, std::ios::end); + + append_to_file_stream << append_from_file_stream.rdbuf(); + saved_size = append_to_file_stream.tellg(); + } else { + std::cerr << "fail to open " << append_to_file_name << std::endl; + } + } else { + std::cerr << "fail to open " << append_from_file_name << std::endl; + } + + if (append_to_file_stream.is_open()) append_to_file_stream.close(); + if (append_from_file_stream.is_open()) append_from_file_stream.close(); + + return saved_size; +} + +int64_t copy_file_trailing_bytes(const std::string& source_file, + const std::string& target_file, + uint64_t offset) { + int64_t actual_file_size = file_size(source_file); + if (actual_file_size < (int64_t)offset) { + return -1; + } + + int64_t saved_size = -1; + + std::fstream source_file_stream; + std::fstream target_file_stream; + + // clang-format off + // There something seriously wrong with the clang formatting + // here. + target_file_stream.open(target_file, std::fstream::out | + std::fstream::trunc | std::fstream::binary); + source_file_stream.open(source_file, + std::fstream::in | std::fstream::binary); + // clang-format on + + if (target_file_stream.is_open()) { + if (source_file_stream.is_open()) { + source_file_stream.seekg(offset); + target_file_stream << source_file_stream.rdbuf(); + saved_size = target_file_stream.tellg(); + } else { + std::cerr << "fail to open " << source_file << std::endl; + } + } else { + std::cerr << "fail to open " << target_file << std::endl; + } + + if (source_file_stream.is_open()) source_file_stream.close(); + if (target_file_stream.is_open()) target_file_stream.close(); + + return saved_size; +} + } // namespace osf } // namespace ouster diff --git a/ouster_osf/src/compat_ops.h b/ouster_osf/src/compat_ops.h index 0cac16f2..eb418ca5 100644 --- a/ouster_osf/src/compat_ops.h +++ b/ouster_osf/src/compat_ops.h @@ -5,11 +5,13 @@ #pragma once +#include #include namespace ouster { namespace osf { +/// @todo Fix the api comments in this file #ifdef _WIN32 constexpr char FILE_SEP = '\\'; #else @@ -52,5 +54,38 @@ bool mmap_close(uint8_t* file_buf, const uint64_t file_size); /// Get the last system error and return it in a string (not wide string) std::string get_last_error(); +/** + * Truncate a file to a certain length + * + * @param[in] path The file to truncate. + * @param[in] filesize The final size of the file. + * + * @return The number of bytes of the final file. + */ +int64_t truncate_file(const std::string& path, uint64_t filesize); + +/** + * Appends one file to another + * + * @param[in] append_to_file_name The file to append to. + * @param[in] append_from_file_name The file to append from. + * + * @return The number of bytes of the final file. + */ +int64_t append_binary_file(const std::string& append_to_file_name, + const std::string& append_from_file_name); + +/** + * Copies trailing bytes from a file + * + * @param[in] source_file The file to copy from. + * @param[in] target_file The file to copy to. + * @param[in] offset The offset in the source_file to start copying from. 
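+ *
+ * A hypothetical sketch (the paths and the 1024 byte offset are
+ * placeholders):
+ @code{.cpp}
+ // Copy everything after the first 1024 bytes of "in.osf" into "tail.bin"
+ int64_t written = copy_file_trailing_bytes("in.osf", "tail.bin", 1024);
+ if (written < 0) {
+     // the source file was shorter than the requested offset
+ }
+ @endcode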
+ * + * @return The number of bytes of the target file. + */ +int64_t copy_file_trailing_bytes(const std::string& source_file, + const std::string& target_file, + uint64_t offset); } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/fb_utils.cpp b/ouster_osf/src/fb_utils.cpp index cc82bcb5..f563ef3a 100644 --- a/ouster_osf/src/fb_utils.cpp +++ b/ouster_osf/src/fb_utils.cpp @@ -132,34 +132,5 @@ uint64_t finish_osf_file(const std::string& filename, return saved_size; } -void print_metadata_buf(const uint8_t* buf, const uint32_t buf_size) { - (void)buf_size; - auto a = ouster::osf::gen::GetSizePrefixedMetadata(buf); - std::cout << "=== Metadata: =====================" << std::endl; - std::cout << "id = " << a->id()->str() << std::endl; - std::cout << "start_ts = " << a->start_ts() << std::endl; - std::cout << "end_ts = " << a->end_ts() << std::endl; - auto cs = a->chunks(); - std::cout << "chunks.size = " << cs->size() << std::endl; - for (uint32_t i = 0; i < cs->size(); ++i) { - auto c = cs->Get(i); - std::cout << " chunks[" << i << "] = " << c->start_ts() << ", " - << c->end_ts() << ", " << c->offset() << std::endl; - } - auto ms = a->entries(); - std::cout << "entries.size = " << ms->size() << std::endl; - for (uint32_t i = 0; i < ms->size(); ++i) { - auto e = ms->Get(i); - std::cout << " entry[" << i << "] = " << e->id() - << ", type = " << e->type()->str() << std::endl; - auto buffer = e->buffer(); - std::cout << " buffer_size = " << buffer->size() << ", vals = ["; - std::cout << osf::to_string(buffer->Data(), - static_cast(buffer->size()), 100) - << "]" << std::endl; - ; - } -} - } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/fb_utils.h b/ouster_osf/src/fb_utils.h index eaa18493..ddfb5563 100644 --- a/ouster_osf/src/fb_utils.h +++ b/ouster_osf/src/fb_utils.h @@ -28,9 +28,10 @@ inline const gen::Header* get_osf_header_from_buf(const uint8_t* buf) { * Verifies the validity of Header buffer and whether it's safe to read it. * It's just checking the well formed Flatbuffer table (not CRC32 check here) * - * @param buf Header buffer, size prefixed - * @param buf_size buffer size (with prefix size bytes but not including CRC32) - * @return true if buffer is valid and can be read + * @param[in] buf Header buffer, size prefixed + * @param[in] buf_size buffer size (with prefix size bytes but not including + * CRC32) + * @return true if buffer is valid and can be read */ inline bool verify_osf_header_buf(const uint8_t* buf, const uint32_t buf_size) { auto verifier = flatbuffers::Verifier(buf, buf_size); @@ -41,9 +42,9 @@ inline bool verify_osf_header_buf(const uint8_t* buf, const uint32_t buf_size) { * Checks the validity of a Metadata buffer and whether it's safe to read it. * It's checking the well formed Flatbuffer table and CRC32. * - * @param buf metadata buffer, size prefixed - * @param buf_size buffer size (with CRC32 and prefix size bytes) - * @return true if buffer is valid and can be read + * @param[in] buf metadata buffer, size prefixed + * @param[in] buf_size buffer size (with CRC32 and prefix size bytes) + * @return true if buffer is valid and can be read */ bool check_osf_metadata_buf(const uint8_t* buf, const uint32_t buf_size); @@ -51,13 +52,20 @@ bool check_osf_metadata_buf(const uint8_t* buf, const uint32_t buf_size); * Checks the validity of a Chunk buffer and whether it's safe to read it. 
* It's checking the well formed Flatbuffer table and CRC32. * - * @param buf metadata buffer, size prefixed - * @param buf_size buffer size (with CRC32 and prefix size bytes) - * @return true if buffer is valid and can be read + * @param[in] buf metadata buffer, size prefixed + * @param[in] buf_size buffer size (with CRC32 and prefix size bytes) + * @return true if buffer is valid and can be read */ bool check_osf_chunk_buf(const uint8_t* buf, const uint32_t buf_size); -/** transforms Flatbuffers vector to a std::vector. */ +/** + * Transforms Flatbuffers vector to a std::vector. + * + * @tparam T The type of the vector to transform. + * + * @param[in] fb_vec The vector to transform. + * @return The transformed vector. + **/ template std::vector vector_from_fb_vector(const flatbuffers::Vector* fb_vec); @@ -68,15 +76,14 @@ std::vector vector_from_fb_vector(const flatbuffers::Vector* fb_vec); * CRC32 field in the end. Successfull operation writes size + 4 bytes to the * file. * - * @param buf pointer to the data to save, full content of the buffer used + * @param[in] buf pointer to the data to save, full content of the buffer used * to calculate CRC - * @param size number of bytes to read from buffer and store to the file - * @param filename full path to the file - * @param append if true appends the content to the end of the file, - * otherwise - overwrite the file with the current buffer. - * @return number of bytes actuallt written to the file. Successfull write is + * @param[in] size number of bytes to read from buffer and store to the file + * @param[in] filename full path to the file + * @param[in] append if true appends the content to the end of the file, + * otherwise - overwrite the file with the current buffer. + * @return Number of bytes actually written to the file. Successfull write is * size + 4 bytes (4 bytes for CRC field) - * */ uint64_t buffer_to_file(const uint8_t* buf, const uint64_t size, const std::string& filename, bool append = false); @@ -86,11 +93,11 @@ uint64_t buffer_to_file(const uint8_t* buf, const uint64_t size, * appended to the actual bytes. Usually it's a size prefixed finished builder * but not necessarily * - * @param builder Flatbuffers builder - * @param filename filename to save bytes - * @param append if true appends the content to the end of the file, - * otherwise - overwrite the file with the current buffer. - * @return number of bytes actuallt written to the file. Successfull write is + * @param[in] builder Flatbuffers builder + * @param[in] filename filename to save bytes + * @param[in] append if true appends the content to the end of the file, + * otherwise - overwrite the file with the current buffer. + * @return Number of bytes actually written to the file. Successfull write is * size + 4 bytes (4 bytes for CRC field) */ uint64_t builder_to_file(flatbuffers::FlatBufferBuilder& builder, @@ -99,8 +106,8 @@ uint64_t builder_to_file(flatbuffers::FlatBufferBuilder& builder, /** * Starts the OSF v2 file with a header (in INVALID state). * - * @param filename of the file to be created. Overwrite if file exists. - * + * @param[in] filename of the file to be created. Overwrite if file exists. + * @return Number of bytes actually written to the file. */ uint64_t start_osf_file(const std::string& filename); @@ -108,15 +115,13 @@ uint64_t start_osf_file(const std::string& filename); * Finish OSF v2 file with updated offset to metadata and filesize. As a * result file left in VALID state. * - * @param filename of the file to be created. 
Overwrite if file exists. - * @return number of bytes actuallt written to the file. + * @param[in] filename of the file to be created. Overwrite if file exists. + * @param[in] metadata_offset The offset to the metadata blob. + * @param[in] metadata_size The size of the metadata blob. + * @return Number of bytes actually written to the file. */ uint64_t finish_osf_file(const std::string& filename, const uint64_t metadata_offset, const uint32_t metadata_size); - -/** Debug method to print Flatbuffers Metadata buffer */ -void print_metadata_buf(const uint8_t* buf, const uint32_t buf_size); - } // namespace osf } // namespace ouster diff --git a/ouster_osf/src/file.cpp b/ouster_osf/src/file.cpp index 8fe1f57c..70011abf 100644 --- a/ouster_osf/src/file.cpp +++ b/ouster_osf/src/file.cpp @@ -100,6 +100,10 @@ OsfFile::OsfFile(const std::string& filename, OpenMode mode) : OsfFile() { } } +uint64_t OsfFile::size() const { return size_; }; + +std::string OsfFile::filename() const { return filename_; } + OSF_VERSION OsfFile::version() { if (!good()) { return OSF_VERSION::V_INVALID; @@ -150,7 +154,11 @@ bool OsfFile::valid() { } if (osf_header->file_length() != size_) { - print_error(filename_, "OSF header file size field is incorrect."); + std::stringstream ss; + ss << "OSF file size does not match the stored value"; + ss << " Expected: " << size_; + ss << " Actual: " << osf_header->file_length(); + print_error(filename_, ss.str()); return false; } @@ -172,9 +180,16 @@ bool OsfFile::valid() { return true; } +bool OsfFile::good() const { return state_ == FileState::GOOD; } + +bool OsfFile::operator!() const { return !good(); }; + +OsfFile::operator bool() const { return good(); }; + +uint64_t OsfFile::offset() const { return offset_; } // ========= Geneal Data Access ============= -OsfFile& OsfFile::seek(const uint64_t pos) { +OsfFile& OsfFile::seek(uint64_t pos) { if (!good()) throw std::logic_error("bad osf file"); if (pos > size_) { std::stringstream ss; @@ -372,4 +387,4 @@ uint8_t* OsfFile::get_metadata_chunk_ptr() { } } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/layout_standard.cpp b/ouster_osf/src/layout_standard.cpp deleted file mode 100644 index 8837c3e0..00000000 --- a/ouster_osf/src/layout_standard.cpp +++ /dev/null @@ -1,42 +0,0 @@ -/** - * Copyright(c) 2021, Ouster, Inc. - * All rights reserved. - */ - -#include "ouster/osf/layout_standard.h" - -#include - -#include "ouster/osf/writer.h" - -namespace ouster { -namespace osf { - -StandardLayoutCW::StandardLayoutCW(Writer& writer, uint32_t chunk_size) - : chunk_size_{chunk_size ? 
chunk_size : STANDARD_DEFAULT_CHUNK_SIZE}, - writer_{writer} {} - -void StandardLayoutCW::saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& msg_buf) { - if (chunk_builder_.size() + msg_buf.size() > chunk_size_) { - finish_chunk(); - } - - chunk_builder_.saveMessage(stream_id, ts, msg_buf); -} - -void StandardLayoutCW::finish_chunk() { - std::vector bb = chunk_builder_.finish(); - if (!bb.empty()) { - writer_.emit_chunk(chunk_builder_.start_ts(), chunk_builder_.end_ts(), - bb); - } - - // Prepare for the new chunk messages - chunk_builder_.reset(); -} - -void StandardLayoutCW::finish() { finish_chunk(); } - -} // namespace osf -} // namespace ouster \ No newline at end of file diff --git a/ouster_osf/src/layout_streaming.cpp b/ouster_osf/src/layout_streaming.cpp index 0a881d72..d4ba8caa 100644 --- a/ouster_osf/src/layout_streaming.cpp +++ b/ouster_osf/src/layout_streaming.cpp @@ -17,8 +17,8 @@ StreamingLayoutCW::StreamingLayoutCW(Writer& writer, uint32_t chunk_size) : chunk_size_{chunk_size ? chunk_size : STREAMING_DEFAULT_CHUNK_SIZE}, writer_{writer} {} -void StreamingLayoutCW::saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& msg_buf) { +void StreamingLayoutCW::save_message(const uint32_t stream_id, const ts_t ts, + const std::vector& msg_buf) { if (!chunk_builders_.count(stream_id)) { chunk_builders_.insert({stream_id, std::make_shared()}); } @@ -28,7 +28,7 @@ void StreamingLayoutCW::saveMessage(const uint32_t stream_id, const ts_t ts, // checking non-decreasing invariant of chunks and messages if (chunk_builder->end_ts() > ts) { std::stringstream err; - err << "ERROR: Can't write wirh a decreasing timestamp: " << ts.count() + err << "ERROR: Can't write with a decreasing timestamp: " << ts.count() << " for stream_id: " << stream_id << " ( previous recorded timestamp: " << chunk_builder->end_ts().count() << ")"; @@ -39,12 +39,23 @@ void StreamingLayoutCW::saveMessage(const uint32_t stream_id, const ts_t ts, finish_chunk(stream_id, chunk_builder); } - chunk_builder->saveMessage(stream_id, ts, msg_buf); + chunk_builder->save_message(stream_id, ts, msg_buf); // update running statistics per stream stats_message(stream_id, ts, msg_buf); } +void StreamingLayoutCW::finish() { + for (auto& cb_it : chunk_builders_) { + finish_chunk(cb_it.first, cb_it.second); + } + + writer_.add_metadata(StreamingInfo{ + chunk_stream_id_, {stream_stats_.begin(), stream_stats_.end()}}); +} + +uint32_t StreamingLayoutCW::chunk_size() const { return chunk_size_; } + void StreamingLayoutCW::stats_message(const uint32_t stream_id, const ts_t ts, const std::vector& msg_buf) { auto msg_size = static_cast(msg_buf.size()); @@ -70,15 +81,5 @@ void StreamingLayoutCW::finish_chunk( // Prepare for the new chunk messages chunk_builder->reset(); } - -void StreamingLayoutCW::finish() { - for (auto& cb_it : chunk_builders_) { - finish_chunk(cb_it.first, cb_it.second); - } - - writer_.addMetadata(StreamingInfo{ - chunk_stream_id_, {stream_stats_.begin(), stream_stats_.end()}}); -} - } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/meta_extrinsics.cpp b/ouster_osf/src/meta_extrinsics.cpp index 1445d9b5..f9ca941c 100644 --- a/ouster_osf/src/meta_extrinsics.cpp +++ b/ouster_osf/src/meta_extrinsics.cpp @@ -11,6 +11,16 @@ namespace ouster { namespace osf { +Extrinsics::Extrinsics(const mat4d& extrinsics, uint32_t ref_meta_id, + const std::string& name) + : extrinsics_(extrinsics), ref_meta_id_{ref_meta_id}, 
name_{name} {} + +const mat4d& Extrinsics::extrinsics() const { return extrinsics_; } + +const std::string& Extrinsics::name() const { return name_; } + +uint32_t Extrinsics::ref_meta_id() const { return ref_meta_id_; } + std::vector Extrinsics::buffer() const { flatbuffers::FlatBufferBuilder fbb = flatbuffers::FlatBufferBuilder(256); std::vector extrinsic_vec(16); diff --git a/ouster_osf/src/meta_lidar_sensor.cpp b/ouster_osf/src/meta_lidar_sensor.cpp index 2f190c8f..6162a68c 100644 --- a/ouster_osf/src/meta_lidar_sensor.cpp +++ b/ouster_osf/src/meta_lidar_sensor.cpp @@ -10,11 +10,23 @@ #include "json_utils.h" #include "ouster/osf/basics.h" +using sensor_info = ouster::sensor::sensor_info; + namespace ouster { namespace osf { // === Lidar Sensor stream/msgs functions ==================== +/** + * Internal helper function for creating flatbuffer blobs to represent + * LidarSensor objects. + * + * @param[in] fbb The flatbufferbuilder to use when generating the blob. + * @param[in] sensor_metadata ///< The json string representation of the + * ///< sensor_info to use when creating + * ///< the flatbuffer blob. + * @return The offset pointer inside the flatbufferbuilder to the new section. + */ flatbuffers::Offset create_lidar_sensor( flatbuffers::FlatBufferBuilder& fbb, const std::string& sensor_metadata) { auto ls_offset = @@ -22,6 +34,15 @@ flatbuffers::Offset create_lidar_sensor( return ls_offset; } +/** + * Internal helper function for restoring a LidarSensor's json string + * representation of a sensor_info from a raw flatbuffer byte vector. + * + * @param[in] buf The flatbuffer byte vector. + * @return ///< The json string representation of the sensor_info object + * ///< contained within the flatbuffer blob. + */ + std::unique_ptr restore_lidar_sensor( const std::vector buf) { auto lidar_sensor = v2::GetSizePrefixedLidarSensor(buf.data()); @@ -33,6 +54,17 @@ std::unique_ptr restore_lidar_sensor( return std::make_unique(sensor_metadata); } +LidarSensor::LidarSensor(const sensor_info& si) + : sensor_info_(si), metadata_(si.updated_metadata_string()) {} + +LidarSensor::LidarSensor(const std::string& sensor_metadata) + : sensor_info_(sensor::parse_metadata(sensor_metadata)), + metadata_(sensor_metadata) {} + +const sensor_info& LidarSensor::info() const { return sensor_info_; } + +const std::string& LidarSensor::metadata() const { return metadata_; } + std::vector LidarSensor::buffer() const { flatbuffers::FlatBufferBuilder fbb = flatbuffers::FlatBufferBuilder(32768); auto ls_offset = create_lidar_sensor(fbb, metadata_); @@ -65,5 +97,7 @@ std::string LidarSensor::repr() const { return json_string(lidar_sensor_obj); }; +std::string LidarSensor::to_string() const { return repr(); }; + } // namespace osf } // namespace ouster diff --git a/ouster_osf/src/meta_streaming_info.cpp b/ouster_osf/src/meta_streaming_info.cpp index 1eae41eb..e8ee7040 100644 --- a/ouster_osf/src/meta_streaming_info.cpp +++ b/ouster_osf/src/meta_streaming_info.cpp @@ -16,7 +16,7 @@ namespace ouster { namespace osf { -std::string to_string(ChunkInfo chunk_info) { +std::string to_string(const ChunkInfo& chunk_info) { std::stringstream ss; ss << "{offset = " << chunk_info.offset << ", stream_id = " << chunk_info.stream_id @@ -24,6 +24,23 @@ std::string to_string(ChunkInfo chunk_info) { return ss.str(); } +StreamStats::StreamStats(uint32_t s_id, ts_t t, uint32_t msg_size) + : stream_id{s_id}, + start_ts{t}, + end_ts{t}, + message_count{1}, + message_avg_size{msg_size} {}; + +void StreamStats::update(ts_t t, uint32_t 
msg_size) { + if (start_ts > t) start_ts = t; + if (end_ts < t) end_ts = t; + ++message_count; + int avg_size = static_cast(message_avg_size); + avg_size = avg_size + (static_cast(msg_size) - avg_size) / + static_cast(message_count); + message_avg_size = static_cast(avg_size); +} + std::string to_string(const StreamStats& stream_stats) { std::stringstream ss; ss << "{stream_id = " << stream_stats.stream_id @@ -62,6 +79,25 @@ flatbuffers::Offset create_streaming_info( return si_offset; } +StreamingInfo::StreamingInfo( + const std::vector>& chunks_info, + const std::vector>& stream_stats) + : chunks_info_{chunks_info.begin(), chunks_info.end()}, + stream_stats_{stream_stats.begin(), stream_stats.end()} {} + +StreamingInfo::StreamingInfo( + const std::map& chunks_info, + const std::map& stream_stats) + : chunks_info_(chunks_info), stream_stats_(stream_stats) {} + +std::map& StreamingInfo::chunks_info() { + return chunks_info_; +} + +std::map& StreamingInfo::stream_stats() { + return stream_stats_; +} + std::vector StreamingInfo::buffer() const { flatbuffers::FlatBufferBuilder fbb = flatbuffers::FlatBufferBuilder(32768); auto si_offset = create_streaming_info(fbb, chunks_info_, stream_stats_); @@ -138,4 +174,4 @@ std::string StreamingInfo::repr() const { }; } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/metadata.cpp b/ouster_osf/src/metadata.cpp index c40ff6d6..02623534 100644 --- a/ouster_osf/src/metadata.cpp +++ b/ouster_osf/src/metadata.cpp @@ -27,6 +27,9 @@ std::string MetadataEntry::to_string() const { return ss.str(); } +void MetadataEntry::setId(uint32_t id) { id_ = id; } +uint32_t MetadataEntry::id() const { return id_; } + flatbuffers::Offset MetadataEntry::make_entry( flatbuffers::FlatBufferBuilder& fbb) const { auto buf = this->buffer(); @@ -57,6 +60,23 @@ MetadataEntry::get_registry() { return registry_; } +MetadataEntryRef::MetadataEntryRef(const uint8_t* buf) : buf_{buf} { + const gen::MetadataEntry* meta_entry = + reinterpret_cast(buf_); + buf_type_ = meta_entry->type()->str(); + setId(meta_entry->id()); +} + +std::string MetadataEntryRef::type() const { return buf_type_; } + +std::string MetadataEntryRef::static_type() const { + return metadata_type(); +} + +std::unique_ptr MetadataEntryRef::clone() const { + return std::make_unique(*this); +} + std::vector MetadataEntryRef::buffer() const { const gen::MetadataEntry* meta_entry = reinterpret_cast(buf_); @@ -79,6 +99,8 @@ std::unique_ptr MetadataEntryRef::as_type() const { return m; } +void MetadataEntryRef::setId(uint32_t id) { MetadataEntry::setId(id); } + std::vector> MetadataStore::make_entries(flatbuffers::FlatBufferBuilder& fbb) const { using FbEntriesVector = @@ -91,5 +113,41 @@ MetadataStore::make_entries(flatbuffers::FlatBufferBuilder& fbb) const { return entries; } +uint32_t MetadataStore::add(MetadataEntry&& entry) { return add(entry); } + +uint32_t MetadataStore::add(MetadataEntry& entry) { + if (entry.id() == 0) { + /// @todo [pb]: Figure out the whole sequence of ids in addMetas in + /// the Reader case + assignId(entry); + } else if (metadata_entries_.find(entry.id()) != metadata_entries_.end()) { + std::cout << "WARNING: MetadataStore: ENTRY EXISTS! 
id = " << entry.id() + << std::endl; + return entry.id(); + } else if (next_meta_id_ == entry.id()) { + // Find next available next_meta_id_ so we avoid id collisions + ++next_meta_id_; + auto next_it = metadata_entries_.lower_bound(next_meta_id_); + while (next_it != metadata_entries_.end() && + next_it->first == next_meta_id_) { + ++next_meta_id_; + next_it = metadata_entries_.lower_bound(next_meta_id_); + } + } + + metadata_entries_.emplace(entry.id(), entry.clone()); + return entry.id(); +} + +size_t MetadataStore::size() const { return metadata_entries_.size(); } + +const MetadataStore::MetadataEntriesMap& MetadataStore::entries() const { + return metadata_entries_; +} + +void MetadataStore::assignId(MetadataEntry& entry) { + entry.setId(next_meta_id_++); +} + } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/operations.cpp b/ouster_osf/src/operations.cpp index 938c121e..b1522335 100644 --- a/ouster_osf/src/operations.cpp +++ b/ouster_osf/src/operations.cpp @@ -9,6 +9,7 @@ #include #include +#include "compat_ops.h" #include "fb_utils.h" #include "json/json.h" #include "json_utils.h" @@ -40,7 +41,7 @@ std::string dump_metadata(const std::string& file, bool full) { Reader reader(file); - root["metadata"]["id"] = reader.id(); + root["metadata"]["id"] = reader.metadata_id(); root["metadata"]["start_ts"] = static_cast(reader.start_ts().count()); root["metadata"]["end_ts"] = @@ -107,7 +108,7 @@ void parse_and_print(const std::string& file, bool with_decoding) { thread_local std::atomic_bool quit{false}; auto sig = std::signal(SIGINT, [](int) { quit = true; }); - for (const auto msg : reader.messages_standard()) { + for (const auto msg : reader.messages()) { if (msg.is()) { std::cout << " Ls ts: " << msg.ts().count() << ", stream_id = " << msg.id(); @@ -144,86 +145,117 @@ void parse_and_print(const std::string& file, bool with_decoding) { std::cout << " other (NOT IMPLEMENTED) count = " << other_c << std::endl; } -bool pcap_to_osf(const std::string& pcap_filename, - const std::string& meta_filename, int lidar_port, - const std::string& osf_filename, int chunk_size) { - std::cout << "Converting: " << std::endl - << " PCAP file: " << pcap_filename << std::endl - << " with json file: " << meta_filename << std::endl - << " to OSF file: " << osf_filename << std::endl - << " chunk_size: " - << (chunk_size ? 
std::to_string(chunk_size) : "DEFAULT") - << std::endl; - - PcapRawSource pcap_source{pcap_filename}; - - std::string sensor_metadata = read_text_file(meta_filename); - - auto info = sensor::parse_metadata(sensor_metadata); - - std::cout << "Using sensor data:\n" - << " lidar_port = " << lidar_port << std::endl; +int64_t backup_osf_file_metablob(const std::string& osf_file_name, + const std::string& backup_file_name) { + uint64_t metadata_offset = 0; + { + OsfFile osf_file{osf_file_name}; + metadata_offset = osf_file.metadata_offset(); + } - std::cout << "Processing PCAP packects to OSF messages "; + // Backup the current metadata blob + return copy_file_trailing_bytes(osf_file_name, backup_file_name, + metadata_offset); +} - Writer writer{osf_filename, "ouster-cli osf from_pcap", - static_cast(chunk_size)}; +int64_t restore_osf_file_metablob(const std::string& osf_file_name, + const std::string& backup_file_name) { + uint64_t metadata_offset = 0; + { + OsfFile osf_file{osf_file_name}; + metadata_offset = osf_file.metadata_offset(); + } + truncate_file(osf_file_name, metadata_offset); + auto result = append_binary_file(osf_file_name, backup_file_name); + + if (result > 0) { + finish_osf_file(osf_file_name, metadata_offset, + result - metadata_offset); + } else { + return -1; + } + return result; +} - std::cout << "(chunk_size: " << writer.chunk_size() << "): ..." +/** + * Internal simplification function for generating modified + * metadata flatbuffer blobs. + * + * @param[in] file_name Filename of the OSF file to modify + * @param[in] new_metadata List of new sensor infos to populate + * @return The generated flatbuffer metadata blob + */ +flatbuffers::FlatBufferBuilder _generate_modify_metadata_fbb( + const std::string& file_name, + const std::vector& new_metadata) { + auto metadata_fbb = flatbuffers::FlatBufferBuilder(32768); + Reader reader(file_name); + + std::string metadata_id = reader.metadata_id(); + ts_t start_ts = reader.start_ts(); + ts_t end_ts = reader.end_ts(); + + /// @todo on OsfFile refactor, make a copy constructor for MetadataStore + MetadataStore new_meta_store; + auto old_meta_store = reader.meta_store(); + std::cout << "Looking for non sensor info metadata in old metastore" << std::endl; - - auto field_types = get_field_types(info); - - // Overwrite field_types for Legacy UDP profile, so to reduce the LidarScan - // encoding sizes (saves about ~15% of disk/bandwidth) - if (info.format.udp_profile_lidar == - sensor::UDPProfileLidar::PROFILE_LIDAR_LEGACY) { - field_types.clear(); - field_types.emplace_back(sensor::ChanField::RANGE, - sensor::ChanFieldType::UINT32); - field_types.emplace_back(sensor::ChanField::SIGNAL, - sensor::ChanFieldType::UINT16); - field_types.emplace_back(sensor::ChanField::REFLECTIVITY, - sensor::ChanFieldType::UINT16); - field_types.emplace_back(sensor::ChanField::NEAR_IR, - sensor::ChanFieldType::UINT16); + for (const auto& entry : old_meta_store.entries()) { + std::cout << "Found: " << entry.second->type() << " "; + /// @todo figure out why there isnt an easy def for this + if (entry.second->type() != "ouster/v1/os_sensor/LidarSensor") { + new_meta_store.add(*entry.second); + std::cout << "Is non sensor_info, adding" << std::endl; + } else { + std::cout << std::endl; + } } - std::cout << "LidarScan field_types: " << ouster::to_string(field_types) - << std::endl; - auto sensor_meta_id = writer.addMetadata(sensor_metadata); - auto ls_stream = - writer.createStream(sensor_meta_id, field_types); + for (const auto& entry : new_metadata) { + 
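+        // Wrap each replacement sensor_info in a LidarSensor metadata entry
+        // so it ends up in the rebuilt metadata store.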
new_meta_store.add(LidarSensor(entry)); + } - int ls_cnt = 0; + std::vector> entries = + new_meta_store.make_entries(metadata_fbb); - if (lidar_port > 0) { - pcap_source.addLidarDataHandler( - lidar_port, info, - [&ls_cnt, &ls_stream](const osf::ts_t ts, const LidarScan& ls) { - ls_cnt++; - ls_stream.save(ts, ls); - }); + std::vector chunks{}; + for (const auto& entry : reader.chunks()) { + chunks.emplace_back(entry.start_ts().count(), entry.end_ts().count(), + entry.offset()); } - // TODO[pb]: Remove the SIGINT handlers from C++ wrapped function used in - // Python bindings - // https://pybind11.readthedocs.io/en/stable/faq.html#how-can-i-properly-handle-ctrl-c-in-long-running-functions - thread_local std::atomic_bool quit{false}; - auto sig = std::signal(SIGINT, [](int) { quit = true; }); + auto metadata = ouster::osf::gen::CreateMetadataDirect( + metadata_fbb, metadata_id.c_str(), + !chunks.empty() ? start_ts.count() : 0, + !chunks.empty() ? end_ts.count() : 0, &chunks, &entries); - pcap_source.runWhile( - [](const sensor_utils::packet_info&) { return !quit; }); + metadata_fbb.FinishSizePrefixed(metadata, + ouster::osf::gen::MetadataIdentifier()); + return metadata_fbb; +} - // restore signal handler - std::signal(SIGINT, sig); +int64_t osf_file_modify_metadata( + const std::string& file_name, + const std::vector& new_metadata) { + std::string temp_dir; + std::string temp_path; + int64_t saved_bytes = -2; + uint64_t metadata_offset = 0; + + // Scope the reading portion so that we dont run into read write file + // locks + { + OsfFile osf_file{file_name}; + metadata_offset = osf_file.metadata_offset(); + } - writer.close(); + auto metadata_fbb = _generate_modify_metadata_fbb(file_name, new_metadata); - std::cout << "Saved to OSF file:" << std::endl - << " Lidar Scan messages: " << ls_cnt << std::endl; + truncate_file(file_name, metadata_offset); + saved_bytes = builder_to_file(metadata_fbb, file_name, true); + finish_osf_file(file_name, metadata_offset, saved_bytes); - return true; + return saved_bytes; } } // namespace osf diff --git a/ouster_osf/src/png_tools.cpp b/ouster_osf/src/png_tools.cpp index ad28971d..82cd213d 100644 --- a/ouster_osf/src/png_tools.cpp +++ b/ouster_osf/src/png_tools.cpp @@ -9,9 +9,9 @@ #include #include +#include #include #include -#include #include "ouster/lidar_scan.h" @@ -83,11 +83,9 @@ void png_osf_read_data(png_structp png_ptr, png_bytep bytes, vec_read->read(bytes, bytes_len); }; -// void user_read_data(png_structp png_ptr, png_bytep data, png_size_t length); - /** * It's needed for custom png IO operations... but I've never seen it's called. - * And also there are no need to flush writer to std::vector buufer in our case. + * And also there are no need to flush writer to std::vector buffer in our case. 
*/ void png_osf_flush_data(png_structp){}; @@ -154,7 +152,7 @@ void png_osf_write_start(png_structp png_ptr, png_infop png_info_ptr, } // ========== Encode Functions =================================== - +#ifdef OUSTER_OSF_NO_THREADING ScanData scanEncodeFieldsSingleThread(const LidarScan& lidar_scan, const std::vector& px_offset, const LidarScanFieldTypes& field_types) { @@ -170,7 +168,7 @@ ScanData scanEncodeFieldsSingleThread(const LidarScan& lidar_scan, return fields_data; } - +#else ScanData scanEncodeFields(const LidarScan& lidar_scan, const std::vector& px_offset, const LidarScanFieldTypes& field_types) { @@ -185,7 +183,7 @@ ScanData scanEncodeFields(const LidarScan& lidar_scan, const size_t fields_num = field_types.size(); // Number of fields to pack into a single thread coder size_t per_thread_num = (fields_num + con_num - 1) / con_num; - std::vector coders{}; + std::vector> futures{}; size_t scan_idx = 0; for (size_t t = 0; t < con_num && t * per_thread_num < fields_num; ++t) { // Per every thread we pack the `per_thread_num` field_types encodings @@ -203,15 +201,18 @@ ScanData scanEncodeFields(const LidarScan& lidar_scan, } // Start an encoder thread with selected fields and corresponding // indices list - coders.emplace_back(std::thread{fieldEncodeMulti, std::cref(lidar_scan), + futures.emplace_back(std::async(fieldEncodeMulti, std::cref(lidar_scan), thread_fields, std::cref(px_offset), - std::ref(fields_data), thread_idxs}); + std::ref(fields_data), thread_idxs)); } - for (auto& t : coders) t.join(); + for (auto& t : futures) { + t.get(); + } return fields_data; } +#endif template bool encode8bitImage(ScanChannelData& res_buf, @@ -617,17 +618,15 @@ template bool encode64bitImage( template bool encode64bitImage( ScanChannelData&, const Eigen::Ref>&); -bool fieldEncodeMulti(const LidarScan& lidar_scan, +void fieldEncodeMulti(const LidarScan& lidar_scan, const LidarScanFieldTypes& field_types, const std::vector& px_offset, ScanData& scan_data, const std::vector& scan_idxs) { if (field_types.size() != scan_idxs.size()) { - std::cerr << "ERROR: in fieldEncodeMulti field_types.size() should " - "match scan_idxs.size()" - << std::endl; - std::abort(); + throw std::invalid_argument( + "ERROR: in fieldEncodeMulti field_types.size() should " + "match scan_idxs.size()"); } - auto res_err = false; for (size_t i = 0; i < field_types.size(); ++i) { auto err = fieldEncode(lidar_scan, field_types[i], px_offset, scan_data, scan_idxs[i]); @@ -638,9 +637,7 @@ bool fieldEncodeMulti(const LidarScan& lidar_scan, "fieldEncodeMulti)" << std::endl; } - res_err = res_err || err; } - return res_err; } bool fieldEncode( @@ -648,9 +645,9 @@ bool fieldEncode( const std::pair field_type, const std::vector& px_offset, ScanData& scan_data, size_t scan_idx) { if (scan_idx >= scan_data.size()) { - std::cerr << "ERROR: scan_data size is not sufficient to hold idx: " - << scan_idx << std::endl; - std::abort(); + throw std::invalid_argument( + "ERROR: scan_data size is not sufficient to hold idx: " + + std::to_string(scan_idx)); } bool res = true; switch (field_type.second) { @@ -745,10 +742,9 @@ bool fieldDecodeMulti(LidarScan& lidar_scan, const ScanData& scan_data, const LidarScanFieldTypes& field_types, const std::vector& px_offset) { if (field_types.size() != scan_idxs.size()) { - std::cerr << "ERROR: in fieldDecodeMulti field_types.size() should " - "match scan_idxs.size()" - << std::endl; - std::abort(); + throw std::invalid_argument( + "ERROR: in fieldDecodeMulti field_types.size() should " + "match 
scan_idxs.size()"); } auto res_err = false; for (size_t i = 0; i < field_types.size(); ++i) { @@ -762,7 +758,7 @@ bool fieldDecodeMulti(LidarScan& lidar_scan, const ScanData& scan_data, } return res_err; } - +#ifdef OUSTER_OSF_NO_THREADING bool scanDecodeFieldsSingleThread(LidarScan& lidar_scan, const ScanData& scan_data, const std::vector& px_offset) { @@ -784,7 +780,9 @@ bool scanDecodeFieldsSingleThread(LidarScan& lidar_scan, } return false; } - +#else +// TWS 20240301 TODO: determine if we can deduplicate this code (see +// scanEncodeFields) bool scanDecodeFields(LidarScan& lidar_scan, const ScanData& scan_data, const std::vector& px_offset) { LidarScanFieldTypes field_types(lidar_scan.begin(), lidar_scan.end()); @@ -802,7 +800,7 @@ bool scanDecodeFields(LidarScan& lidar_scan, const ScanData& scan_data, // Number of fields to pack into a single thread coder size_t per_thread_num = (fields_num + con_num - 1) / con_num; - std::vector coders{}; + std::vector> futures{}; size_t scan_idx = 0; for (size_t t = 0; t < con_num && t * per_thread_num < fields_num; ++t) { @@ -823,15 +821,22 @@ bool scanDecodeFields(LidarScan& lidar_scan, const ScanData& scan_data, // Start a decoder thread with selected fields and corresponding // indices list - coders.emplace_back(std::thread{fieldDecodeMulti, std::ref(lidar_scan), + futures.emplace_back(std::async(fieldDecodeMulti, std::ref(lidar_scan), std::cref(scan_data), thread_idxs, - thread_fields, std::cref(px_offset)}); + thread_fields, std::cref(px_offset))); } - for (auto& t : coders) t.join(); + for (auto& t : futures) { + // TODO: refactor, use return std::all + bool res = t.get(); + if (!res) { + return false; + } + } return false; } +#endif template bool decode24bitImage(Eigen::Ref> img, @@ -1375,24 +1380,5 @@ template bool decode8bitImage(Eigen::Ref>, template bool decode8bitImage(Eigen::Ref>, const ScanChannelData&); -// =================== Save to File Functions ==================== - -bool saveScanChannel(const ScanChannelData& channel_buf, - const std::string& filename) { - std::fstream file(filename, std::ios_base::out | std::ios_base::binary); - - if (file.good()) { - file.write(reinterpret_cast(channel_buf.data()), - channel_buf.size()); - if (file.good()) { - file.close(); - return false; // SUCCESS - } - } - - file.close(); - return true; // FAILURE -} - } // namespace osf } // namespace ouster diff --git a/ouster_osf/src/png_tools.h b/ouster_osf/src/png_tools.h index 22e799fb..a45896d0 100644 --- a/ouster_osf/src/png_tools.h +++ b/ouster_osf/src/png_tools.h @@ -34,37 +34,39 @@ using LidarScanFieldTypes = * Decode the PNG buffers into LidarScan object. This is a dispatch function to * the specific decoding functions. * - * @param lidar_scan the output object that will be filled as a result of - * decoding - * @param scan_data PNG buffers to decode - * @param px_offset pixel shift per row used to reconstruct staggered range - * image form + * @param[out] lidar_scan The output object that will be filled as a result of + * decoding. + * @param[in] scan_data PNG buffers to decode. + * @param[in] px_offset Pixel shift per row used to reconstruct staggered range + * image form. * @return false (0) if operation is successful true (1) if error occured */ bool scanDecode(LidarScan& lidar_scan, const ScanData& scan_data, const std::vector& px_offset); +#ifdef OUSTER_OSF_NO_THREADING /// Decoding eUDP LidarScan // TODO[pb]: Make decoding of just some fields from scan data?? Not now ... 
bool scanDecodeFieldsSingleThread(LidarScan& lidar_scan, const ScanData& scan_data, const std::vector& px_offset); - +#else /// Decoding eUDP LidarScan, multithreaded version bool scanDecodeFields(LidarScan& lidar_scan, const ScanData& scan_data, const std::vector& px_offset); +#endif /** * Decode a single field to lidar_scan * - * @param lidar_scan the output object that will be filled as a result of - * decoding - * @param scan_data PNG buffers to decode - * @param scan_idx index in `scan_data` of the beginning of field buffers - * @param field_type the field of `lidar_scan` to fill in with the docoded - * result - * @param px_offset pixel shift per row used to reconstruct staggered range - * image form + * @param[out] lidar_scan The output object that will be filled as a result of + * decoding. + * @param[in] scan_data PNG buffers to decode. + * @param[in] scan_idx Index in `scan_data` of the beginning of field buffers. + * @param[in] field_type The field of `lidar_scan` to fill in with the decoded + * result. + * @param[in] px_offset Pixel shift per row used to reconstruct staggered range + * image form. * @return false (0) if operation is successful true (1) if error occured */ bool fieldDecode( @@ -75,17 +77,17 @@ bool fieldDecode( /** * Decode multiple fields to lidar_scan * - * @param lidar_scan the output object that will be filled as a result of - * decoding - * @param scan_data PNG buffers to decode, sequentially in the order of - * field_types - * @param scan_idxs a vector of indices in `scan_data` of the beginning of + * @param[out] lidar_scan The output object that will be filled as a result of + * decoding. + * @param[in] scan_data PNG buffers to decode, sequentially in the order of + * field_types + * @param[in] scan_idxs a vector of indices in `scan_data` of the beginning of * field buffers that will be decoded. `field_types.size()` * should be equal to `scan_idxs.size()` i.e. we need to * provide the index for every field type in * field_types where it's encoded data located - * @param field_types a vector of filed_types of lidar scan to decode - * @param px_offset pixel shift per row used to reconstruct staggered range + * @param[in] field_types a vector of filed_types of lidar scan to decode + * @param[in] px_offset pixel shift per row used to reconstruct staggered range * image form * @return false (0) if operation is successful true (1) if error occured */ @@ -93,92 +95,142 @@ bool fieldDecodeMulti(LidarScan& lidar_scan, const ScanData& scan_data, const std::vector& scan_idxs, const LidarScanFieldTypes& field_types, const std::vector& px_offset); +/** + * @defgroup OSFPngDecode8 Decoding Functionality. + * Decode single PNG buffer (channel_buf) of 8 bit Gray encoding into + * img. + * + * @tparam T The type to use for the output array. + * + * @param[out] img The output img that will be filled as a result of + * decoding. + * @param[in] channel_buf Single PNG buffer to decode. + * @return false (0) if operation is successful true (1) if error occured + */ +/** @copydoc OSFPngDecode8 */ template bool decode8bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf); +/** + * @copydoc OSFPngDecode8 + * @param[in] px_offset pixel shift per row used to reconstruct staggered range + * image form + */ template bool decode8bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf, const std::vector& px_offset); /** + * @defgroup OSFPngDecode16 Decoding Functionality. * Decode single PNG buffer (channel_buf) of 16 bit Gray encoding into - * img + * img. 
* - * @param img the output img that will be filled as a result of - * decoding - * @param channel_buf single PNG buffer to decode - * @param px_offset pixel shift per row used to reconstruct staggered range - * image form + * @tparam T The type to use for the output array. + * + * @param[out] img The output img that will be filled as a result of + * decoding. + * @param[in] channel_buf Single PNG buffer to decode. * @return false (0) if operation is successful true (1) if error occured */ + +/** + * @copydoc OSFPngDecode16 + * @param[in] px_offset pixel shift per row used to reconstruct staggered range + * image form + */ template bool decode16bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf, const std::vector& px_offset); +/** @copydoc OSFPngDecode16 */ template bool decode16bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf); /** + * @defgroup OSFPngDecode24 Decoding Functionality. * Decode single PNG buffer (channel_buf) of 24 bit RGB (8 bit) encoding into * img object. * - * @param img the output img that will be filled as a result of - * decoding - * @param channel_buf single PNG buffer to decode - * @param px_offset pixel shift per row used to reconstruct staggered range - * image form + * @tparam T The type to use for the output array. + * + * @param[out] img The output img that will be filled as a result of + * decoding. + * @param[in] channel_buf Single PNG buffer to decode. * @return false (0) if operation is successful true (1) if error occured */ + +/** + * @copydoc OSFPngDecode24 + * @param[in] px_offset Pixel shift per row used to reconstruct staggered range + * image form. + */ template bool decode24bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf, const std::vector& px_offset); +/** @copydoc OSFPngDecode24 */ template bool decode24bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf); /** + * @defgroup OSFPngDecode32 Decoding Functionality. * Decode single PNG buffer (channel_buf) of 32 bit RGBA (8 bit) encoding into * img object. * - * @param img the output img that will be filled as a result of - * decoding - * @param channel_buf single PNG buffer to decode - * @param px_offset pixel shift per row used to reconstruct staggered range - * image form + * @tparam T The type to use for the output array. + * + * @param[out] img The output img that will be filled as a result of + * decoding. + * @param[in] channel_buf Single PNG buffer to decode. * @return false (0) if operation is successful true (1) if error occured */ + +/** + * @copydoc OSFPngDecode32 + * @param[in] px_offset Pixel shift per row used to reconstruct staggered range + * image form. + */ template bool decode32bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf, const std::vector& px_offset); +/** @copydoc OSFPngDecode32 */ template bool decode32bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf); /** + * @defgroup OSFPngDecode64 Decoding Functionality. * Decode single PNG buffer (channel_buf) of 64 bit RGBA (16 bit) encoding into * img object. * - * @param img the output img that will be filled as a result of - * decoding - * @param channel_buf single PNG buffer to decode - * @param px_offset pixel shift per row used to reconstruct staggered range - * image form + * @tparam T The type to use for the output array. + * + * @param[out] img The output img that will be filled as a result of + * decoding. + * @param[in] channel_buf Single PNG buffer to decode. 
* @return false (0) if operation is successful true (1) if error occured */ + +/** + * @copydoc OSFPngDecode64 + * @param[in] px_offset Pixel shift per row used to reconstruct staggered range + * image form. + */ template bool decode64bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf, const std::vector& px_offset); +/** @copydoc OSFPngDecode64 */ template bool decode64bitImage(Eigen::Ref> img, const ScanChannelData& channel_buf); @@ -188,50 +240,53 @@ bool decode64bitImage(Eigen::Ref> img, /** * Encode LidarScan to PNG buffers storing all field_types present in an object. * - * @param lidar_scan the LidarScan object to encode - * @param px_offset pixel shift per row used to destaggered LidarScan data + * @param[in] lidar_scan The LidarScan object to encode. + * @param[in] px_offset Pixel shift per row used to + * destaggered LidarScan data. * @return encoded PNG buffers, empty() if error occured. */ ScanData scanEncode(const LidarScan& lidar_scan, const std::vector& px_offset); +#ifdef OUSTER_OSF_NO_THREADING /** * Encode the lidar scan fields to PNGs channel buffers (ScanData). * Single-threaded implementation. * - * @param lidar_scan a lidar scan object to encode - * @param px_offset pixel shift per row used to construct de-staggered range - * image form - * @return encoded PNGs in ScanData in order of field_types + * @param[in] lidar_scan A lidar scan object to encode. + * @param[in] px_offset Pixel shift per row used to construct de-staggered range + * image form. + * @return Encoded PNGs in ScanData in order of field_types. */ ScanData scanEncodeFieldsSingleThread(const LidarScan& lidar_scan, const std::vector& px_offset, const LidarScanFieldTypes& field_types); - +#else /** * Encode the lidar scan fields to PNGs channel buffers (ScanData). * Multi-threaded implementation. * - * @param lidar_scan a lidar scan object to encode - * @param px_offset pixel shift per row used to construct de-staggered range - * image form - * @return encoded PNGs in ScanData in order of field_types + * @param[in] lidar_scan A lidar scan object to encode. + * @param[in] px_offset Pixel shift per row used to construct de-staggered range + * image form. + * @param[in] field_types The field types to use for encoding. + * @return Encoded PNGs in ScanData in order of field_types. 
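+ * @note Field encodings are distributed across std::async tasks; an exception
+ * thrown by a worker is rethrown when its future is collected.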
*/ ScanData scanEncodeFields(const LidarScan& lidar_scan, const std::vector& px_offset, const LidarScanFieldTypes& field_types); - +#endif /** * Encode a single lidar scan field to PNGs channel buffer and place it to a * specified `scan_data[scan_idx]` place * - * @param lidar_scan a lidar scan object to encode - * @param field_type a filed_type of lidar scan to encode - * @param px_offset pixel shift per row used to construct de-staggered range - * image form - * @param scan_data channel buffers storage for the encoded lidar_scan - * @param scan_idx index in `scan_data` of the beginning of field buffers where - * the result of encoding will be inserted + * @param[in] lidar_scan a lidar scan object to encode + * @param[in] field_type a filed_type of lidar scan to encode + * @param[in] px_offset pixel shift per row used to construct de-staggered + * range image form + * @param[out] scan_data channel buffers storage for the encoded lidar_scan + * @param[in] scan_idx index in `scan_data` of the beginning of field buffers + * where the result of encoding will be inserted * @return false (0) if operation is successful true (1) if error occured */ bool fieldEncode( @@ -243,26 +298,43 @@ bool fieldEncode( * Encode multiple lidar scan fields to PNGs channel buffers and insert them to * a specified places `scan_idxs` in `scan_data`. * - * @param lidar_scan a lidar scan object to encode - * @param field_types a vector of filed_types of lidar scan to encode - * @param px_offset pixel shift per row used to construct de-staggered range - * image form - * @param scan_data channel buffers storage for the encoded lidar_scan - * @param scan_idxs a vector of indices in `scan_data` of the beginning of field - * buffers where the result of encoding will be inserted. - * `field_types.size()` should be equal to `scan_idxs.size()` - * @return false (0) if operation is successful true (1) if error occured + * @param[in] lidar_scan a lidar scan object to encode + * @param[in] field_types a vector of filed_types of + * lidar scan to encode + * @param[in] px_offset pixel shift per row used to construct de-staggered range + * image form + * @param[out] scan_data channel buffers storage for the encoded lidar_scan + * @param[in] scan_idxs a vector of indices in `scan_data` of the beginning of + * field buffers where the result of encoding will be + * inserted. `field_types.size()` should be equal to + * `scan_idxs.size()` */ -bool fieldEncodeMulti(const LidarScan& lidar_scan, +void fieldEncodeMulti(const LidarScan& lidar_scan, const LidarScanFieldTypes& field_types, const std::vector& px_offset, ScanData& scan_data, const std::vector& scan_idxs); +/** + * @defgroup OSFPngEncode8 Encoding Functionality. + * Encode img object into a 8 bit, Gray, PNG buffer. + * + * @tparam T The type to use for the output array. + * + * @param[out] res_buf The output buffer with a single encoded PNG. + * @param[in] img The image object to encode. + * @return false (0) if operation is successful, true (1) if error occured + */ + +/** + * @copydoc OSFPngEncode8 + * @param[in] px_offset Pixel shift per row used to destagger img data. + */ template bool encode8bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img, const std::vector& px_offset); +/** @copydoc OSFPngEncode8 */ template bool encode8bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img); @@ -270,9 +342,11 @@ bool encode8bitImage(ScanChannelData& res_buf, /** * Encode img object into a 16 bit, Gray, PNG buffer. 
* - * @param res_buf the output buffer with a single encoded PNG - * @param img the image object to encode - * @param px_offset pixel shift per row used to destagger img data + * @tparam T The type to use for the output array. + * + * @param[out] res_buf The output buffer with a single encoded PNG. + * @param[in] img The image object to encode. + * @param[in] px_offset Pixel shift per row used to destagger img data. * @return false (0) if operation is successful, true (1) if error occured */ template @@ -284,8 +358,10 @@ bool encode16bitImage(ScanChannelData& res_buf, * Encode 2D image of a typical lidar scan field channel into a 16 bit, Gray, * PNG buffer. * - * @param res_buf the output buffer with a single encoded PNG - * @param img 2D image or a single LidarScan field data + * @tparam T The type to use for the output array. + * + * @param[out] res_buf The output buffer with a single encoded PNG. + * @param[in] img The image object to encode. * @return false (0) if operation is successful, true (1) if error occured */ template @@ -293,70 +369,82 @@ bool encode16bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img); /** + * @defgroup OSFPngEncode32 Encoding Functionality. * Encode 2D image of a typical lidar scan field channel into a 32 bit, RGBA, * PNG buffer. * - * @param res_buf the output buffer with a single encoded PNG - * @param img 2D image or a single LidarScan field data - * @param px_offset pixel shift per row used to destagger img data + * @tparam T The type to use for the output array. + * + * @param[out] res_buf The output buffer with a single encoded PNG. + * @param[in] img 2D image or a single LidarScan field data. * @return false (0) if operation is successful, true (1) if error occured */ + +/** + * @copydoc OSFPngEncode32 + * @param[in] px_offset Pixel shift per row used to destagger img data. + */ template bool encode32bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img, const std::vector& px_offset); +/** @copydoc OSFPngEncode32 */ template bool encode32bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img); /** + * @defgroup OSFPngEncode24 Encoding Functionality. * Encode 2D image of a typical lidar scan field channel into a 24 bit, RGB, * PNG buffer. * - * @param res_buf the output buffer with a single encoded PNG - * @param img 2D image or a single LidarScan field data - * @param px_offset pixel shift per row used to destagger img data + * @tparam T The type to use for the output array. + * + * @param[out] res_buf The output buffer with a single encoded PNG. + * @param[in] img 2D image or a single LidarScan field data. * @return false (0) if operation is successful, true (1) if error occured */ + +/** + * @copydoc OSFPngEncode24 + * @param[in] px_offset Pixel shift per row used to destagger img data. + */ template bool encode24bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img, const std::vector& px_offset); +/** @copydoc OSFPngEncode24 */ template bool encode24bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img); /** + * @defgroup OSFPngEncode64 Encoding Functionality. * Encode 2D image of a typical lidar scan field channel into a 64 bit, RGBA, * PNG buffer. * - * @param res_buf the output buffer with a single encoded PNG - * @param img 2D image or a single LidarScan field data - * @param px_offset pixel shift per row used to destagger img data + * @tparam T The type to use for the output array. + * + * @param[out] res_buf The output buffer with a single encoded PNG. + * @param[in] img 2D image or a single LidarScan field data. 
* @return false (0) if operation is successful, true (1) if error occured */ + +/** + * @copydoc OSFPngEncode64 + * @param[in] px_offset Pixel shift per row used to destagger img data. + */ template bool encode64bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img, const std::vector& px_offset); +/** @copydoc OSFPngEncode64 */ template bool encode64bitImage(ScanChannelData& res_buf, const Eigen::Ref>& img); -// =================== Save to File Functions ==================== - -/** - * Save PNG encoded scan channel buffer to the PNG file. - * - * @param channel_buf single PNG buffer to decode - * @param filename file name of output PNG image - * @return false (0) if operation is successful, true (1) if error occured - */ -bool saveScanChannel(const ScanChannelData& channel_buf, - const std::string& filename); - } // namespace osf } // namespace ouster diff --git a/ouster_osf/src/reader.cpp b/ouster_osf/src/reader.cpp index 08067fac..11e399bf 100644 --- a/ouster_osf/src/reader.cpp +++ b/ouster_osf/src/reader.cpp @@ -16,6 +16,9 @@ #include "ouster/osf/metadata.h" #include "ouster/types.h" +using StreamChunksMap = + std::unordered_map>>; + namespace ouster { namespace osf { @@ -30,6 +33,7 @@ inline const ouster::osf::v2::Chunk* get_chunk_from_buf(const uint8_t* buf) { // ======================================================= // =========== ChunksPile ================================ // ======================================================= +ChunksPile::ChunksPile() {} void ChunksPile::add(uint64_t offset, ts_t start_ts, ts_t end_ts) { ChunkState cs{}; @@ -120,25 +124,23 @@ ChunkState* ChunksPile::next_by_stream(uint64_t offset) { ChunkState* ChunksPile::first() { return get(0); } -ChunksPile::ChunkStateIter ChunksPile::begin() { return pile_.begin(); } - -ChunksPile::ChunkStateIter ChunksPile::end() { return pile_.end(); } - size_t ChunksPile::size() const { return pile_.size(); } -bool ChunksPile::has_info() const { - return !pile_info_.empty() && pile_info_.size() == pile_.size(); -} - bool ChunksPile::has_message_idx() const { + // return true if we have no chunks (we're functionally indexed) + if (pile_.size() == 0) { + return true; + } // rely on the fact that message_count in the ChunkInfo, if present // during Writing/Chunk building, can't be 0 (by construction in // ChunkBuilder and StreamingLayoutCW) // In other words we can't have Chunks with 0 messages written to OSF // file - return has_info() && pile_info_.begin()->second.message_count > 0; + return pile_info_.size() && pile_info_.begin()->second.message_count > 0; } +StreamChunksMap& ChunksPile::stream_chunks() { return stream_chunks_; } + void ChunksPile::link_stream_chunks() { // This function does a couple of things: // 1. 
Fills the stream_chunks_ map with offsets of chunks per stream id @@ -151,7 +153,7 @@ void ChunksPile::link_stream_chunks() { stream_chunks_.clear(); - if (has_info()) { + if (pile_info_.size()) { // Do the next_offset links by streams auto curr_chunk = first(); while (curr_chunk != nullptr) { @@ -238,12 +240,6 @@ ChunksIter& ChunksIter::operator++() { return *this; } -ChunksIter ChunksIter::operator++(int) { - auto res = *this; - this->next(); - return res; -} - void ChunksIter::next() { if (current_addr_ == end_addr_) return; next_any(); @@ -303,108 +299,6 @@ std::string ChunksRange::to_string() const { return ss.str(); } -// ========================================================== -// ========= Reader::MessagesStandardIter =================== -// ========================================================== - -MessagesStandardIter::MessagesStandardIter() - : current_chunk_it_{}, end_chunk_it_{}, msg_idx_{0} {} - -MessagesStandardIter::MessagesStandardIter(const MessagesStandardIter& other) - : current_chunk_it_(other.current_chunk_it_), - end_chunk_it_(other.end_chunk_it_), - msg_idx_(other.msg_idx_) {} - -MessagesStandardIter::MessagesStandardIter(const ChunksIter begin_it, - const ChunksIter end_it, - const size_t msg_idx) - : current_chunk_it_{begin_it}, end_chunk_it_{end_it}, msg_idx_{msg_idx} { - if (current_chunk_it_ != end_chunk_it_ && !is_cleared()) next(); -} - -const MessageRef MessagesStandardIter::operator*() const { - return current_chunk_it_->operator[](msg_idx_); -} - -std::unique_ptr MessagesStandardIter::operator->() const { - return current_chunk_it_->messages(msg_idx_); -} - -MessagesStandardIter& MessagesStandardIter::operator++() { - this->next(); - return *this; -} - -MessagesStandardIter MessagesStandardIter::operator++(int) { - auto res = *this; - this->next(); - return res; -} - -void MessagesStandardIter::next() { - if (current_chunk_it_ == end_chunk_it_) return; - next_any(); - while (current_chunk_it_ != end_chunk_it_ && !is_cleared()) next_any(); -} - -void MessagesStandardIter::next_any() { - if (current_chunk_it_ == end_chunk_it_) return; - auto chunk_ref = *current_chunk_it_; - ++msg_idx_; - if (msg_idx_ >= chunk_ref.size()) { - // Advance to the next chunk - ++current_chunk_it_; - msg_idx_ = 0; - } -} - -bool MessagesStandardIter::operator==(const MessagesStandardIter& other) const { - return (current_chunk_it_ == other.current_chunk_it_ && - end_chunk_it_ == other.end_chunk_it_ && msg_idx_ == other.msg_idx_); -} - -bool MessagesStandardIter::operator!=(const MessagesStandardIter& other) const { - return !this->operator==(other); -} - -bool MessagesStandardIter::is_cleared() { - if (current_chunk_it_ == end_chunk_it_) return false; - const auto chunk_ref = *current_chunk_it_; - if (!chunk_ref.valid()) return false; - return (msg_idx_ < chunk_ref.size()); -} - -std::string MessagesStandardIter::to_string() const { - std::stringstream ss; - ss << "MessagesStandardIter: [curr_chunk_it = " - << current_chunk_it_.to_string() << ", msg_idx = " << msg_idx_ - << ", end_chunk_it = " << end_chunk_it_.to_string() << "]"; - return ss.str(); -} - -// ========================================================= -// ========= Reader::MessagesStandardRange ========================= -// ========================================================= - -MessagesStandardRange::MessagesStandardRange(const ChunksIter begin_it, - const ChunksIter end_it) - : begin_chunk_it_(begin_it), end_chunk_it_(end_it) {} - -MessagesStandardIter MessagesStandardRange::begin() const { - return 
MessagesStandardIter(begin_chunk_it_, end_chunk_it_, 0); -} - -MessagesStandardIter MessagesStandardRange::end() const { - return MessagesStandardIter(end_chunk_it_, end_chunk_it_, 0); -} - -std::string MessagesStandardRange::to_string() const { - std::stringstream ss; - ss << "MessagesStandardRange: [bit = " << begin_chunk_it_.to_string() - << ", eit = " << end_chunk_it_.to_string() << "]"; - return ss.str(); -} - // ========================================================== // ========= Reader ========================================= // ========================================================== @@ -485,9 +379,7 @@ nonstd::optional Reader::ts_by_message_idx(uint32_t stream_id, return nonstd::nullopt; } -MessagesStandardRange Reader::messages_standard() { - return MessagesStandardRange(chunks().begin(), chunks().end()); -} +bool Reader::has_message_idx() const { return chunks_.has_message_idx(); }; ChunksRange Reader::chunks() { return ChunksRange(0, file_.metadata_offset(), this); @@ -581,6 +473,7 @@ void Reader::read_chunks_info() { // see RFC0018 for details auto streaming_info = meta_store_.get(); if (!streaming_info) { + has_streaming_info_ = false; return; } @@ -595,20 +488,14 @@ void Reader::read_chunks_info() { sci.second.message_count); } + has_streaming_info_ = true; + chunks_.link_stream_chunks(); } // TODO[pb]: MetadataStore to_string() ? -void Reader::print_metadata_entries() { - std::cout << "Reader::print_metadata_entries:\n"; - int i = 0; - for (const auto& me : meta_store_.entries()) { - std::cout << " entry[" << i++ << "] = " << me.second->to_string() - << std::endl; - } -} -std::string Reader::id() const { +std::string Reader::metadata_id() const { if (auto metadata = get_osf_metadata_from_buf(metadata_buf_.data())) { if (metadata->id()) { return metadata->id()->str(); @@ -631,7 +518,9 @@ ts_t Reader::end_ts() const { return ts_t{}; } -bool Reader::has_stream_info() const { return chunks_.has_info(); } +const MetadataStore& Reader::meta_store() const { return meta_store_; } + +bool Reader::has_stream_info() const { return has_streaming_info_; } bool Reader::verify_chunk(uint64_t chunk_offset) { auto cs = chunks_.get(chunk_offset); @@ -649,6 +538,12 @@ bool Reader::verify_chunk(uint64_t chunk_offset) { // ========================================================= // ========= MessageRef ==================================== // ========================================================= +MessageRef::MessageRef(const uint8_t* buf, const MetadataStore& meta_provider) + : buf_(buf), meta_provider_(meta_provider), chunk_buf_{nullptr} {} + +MessageRef::MessageRef(const uint8_t* buf, const MetadataStore& meta_provider, + std::shared_ptr> chunk_buf) + : buf_(buf), meta_provider_(meta_provider), chunk_buf_{chunk_buf} {} uint32_t MessageRef::id() const { const ouster::osf::v2::StampedMessage* sm = @@ -662,6 +557,8 @@ MessageRef::ts_t MessageRef::ts() const { return ts_t(sm->ts()); } +const uint8_t* MessageRef::buf() const { return buf_; } + bool MessageRef::is(const std::string& type_str) const { auto meta = meta_provider_.get(id()); return (meta != nullptr) && (meta->type() == type_str); @@ -721,6 +618,20 @@ ChunkRef::ChunkRef(const uint64_t offset, Reader* reader) ChunkValidity::UNKNOWN); } +ChunkState* ChunkRef::state() { return reader_->chunks_.get(chunk_offset_); } + +const ChunkState* ChunkRef::state() const { + return reader_->chunks_.get(chunk_offset_); +} + +ChunkInfoNode* ChunkRef::info() { + return reader_->chunks_.get_info(chunk_offset_); +} + +const ChunkInfoNode* 
ChunkRef::info() const { + return reader_->chunks_.get_info(chunk_offset_); +} + size_t ChunkRef::size() const { if (!valid()) return 0; const ouster::osf::v2::Chunk* chunk = get_chunk_from_buf(get_chunk_ptr()); @@ -780,6 +691,12 @@ std::string ChunkRef::to_string() const { return ss.str(); } +uint64_t ChunkRef::offset() const { return chunk_offset_; } + +ts_t ChunkRef::start_ts() const { return state()->start_ts; } + +ts_t ChunkRef::end_ts() const { return state()->end_ts; } + const uint8_t* ChunkRef::get_chunk_ptr() const { if (reader_->file_.is_memory_mapped()) { return reader_->file_.buf() + reader_->chunks_base_offset_ + @@ -876,6 +793,11 @@ uint32_t calc_stream_ids_hash(const std::vector& stream_ids) { return hash; } +bool MessagesStreamingIter::greater_chunk_type::operator()( + const opened_chunk_type& a, const opened_chunk_type& b) { + return a.first[a.second].ts() > b.first[b.second].ts(); +} + MessagesStreamingIter::MessagesStreamingIter() : curr_ts_{}, end_ts_{}, @@ -1057,18 +979,6 @@ void MessagesStreamingIter::next() { } } -/// NOTE: Debug function, will be removed after some time ... -void MessagesStreamingIter::print_and_finish() { - while (!curr_chunks_.empty()) { - auto& top = curr_chunks_.top(); - std::cout << "(( ts = " << top.first[top.second].ts().count() - << ", id = " << top.first[top.second].id() - << ", msg_idx = " << top.second - << ", cref = " << top.first.to_string() << std::endl; - curr_chunks_.pop(); - } -} - std::string MessagesStreamingIter::to_string() const { std::stringstream ss; ss << "MessagesStreamingIter: [curr_ts = " << curr_ts_.count() @@ -1114,4 +1024,4 @@ std::string MessagesStreamingRange::to_string() const { } } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/src/stream_lidar_scan.cpp b/ouster_osf/src/stream_lidar_scan.cpp index d96f6c0b..ef47e815 100644 --- a/ouster_osf/src/stream_lidar_scan.cpp +++ b/ouster_osf/src/stream_lidar_scan.cpp @@ -6,6 +6,7 @@ #include "ouster/osf/stream_lidar_scan.h" #include +#include #include "ouster/lidar_scan.h" #include "ouster/osf/basics.h" @@ -45,7 +46,6 @@ bool poses_present(const LidarScan& ls) { ls.pose().end(); } -// TODO[pb]: Error if field_types is not subset of fields in ls? LidarScan slice_with_cast(const LidarScan& ls_src, const LidarScanFieldTypes& field_types) { LidarScan ls_dest{static_cast(ls_src.w), @@ -68,7 +68,9 @@ LidarScan slice_with_cast(const LidarScan& ls_src, ouster::impl::copy_and_cast(), ls_src, ft.first); } else { - ouster::impl::visit_field(ls_dest, ft.first, zero_field()); + throw std::invalid_argument("Required field '" + + sensor::to_string(ft.first) + + "' is missing from scan."); } } @@ -109,29 +111,21 @@ flatbuffers::Offset create_lidar_scan_msg( const LidarScanFieldTypes meta_field_types) { auto ls = lidar_scan; if (!meta_field_types.empty()) { - // Make a reduced field LidarScan (or extend if the field types is - // different) - // TODO[pb]: Consider to error instead of extending LidarScan, but be - // sure to check the consistence everywhere. That's why it's - // not done on the first pass here ... 
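+        // slice_with_cast() now throws std::invalid_argument when a requested
+        // field is missing from the scan instead of zero-filling it.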
+ // Make a reduced field LidarScan (erroring if we are missing anything) ls = slice_with_cast(lidar_scan, meta_field_types); } - // Encode LidarScan to PNG buffers ScanData scan_data = scanEncode(ls, info.format.pixel_shift_by_row); - // Prepare PNG encoded channels for LidarScanMsg.channels vector std::vector> channels; for (const auto& channel_data : scan_data) { channels.emplace_back(gen::CreateChannelDataDirect(fbb, &channel_data)); } - // Prepare field_types for LidarScanMsg std::vector field_types; for (const auto& f : ls) { field_types.emplace_back(to_osf_enum(f.first), to_osf_enum(f.second)); } - auto channels_off = fbb.CreateVector<::flatbuffers::Offset>(channels); auto field_types_off = osf::CreateVectorOfStructs( @@ -141,13 +135,11 @@ flatbuffers::Offset create_lidar_scan_msg( auto measurement_id_off = fbb.CreateVector(ls.measurement_id().data(), ls.w); auto status_off = fbb.CreateVector(ls.status().data(), ls.w); - flatbuffers::Offset> pose_off = 0; if (poses_present(ls)) { pose_off = fbb.CreateVector(ls.pose().data()->data(), ls.pose().size() * 16); } - auto packet_timestamp_id_off = fbb.CreateVector( ls.packet_timestamp().data(), ls.packet_timestamp().size()); return gen::CreateLidarScanMsg( @@ -283,6 +275,17 @@ std::unique_ptr restore_lidar_scan( return ls; } +LidarScanStreamMeta::LidarScanStreamMeta(const uint32_t sensor_meta_id, + const LidarScanFieldTypes field_types) + : sensor_meta_id_{sensor_meta_id}, + field_types_{field_types.begin(), field_types.end()} {} + +uint32_t LidarScanStreamMeta::sensor_meta_id() const { return sensor_meta_id_; } + +const LidarScanFieldTypes& LidarScanStreamMeta::field_types() const { + return field_types_; +} + std::vector LidarScanStreamMeta::buffer() const { flatbuffers::FlatBufferBuilder fbb = flatbuffers::FlatBufferBuilder(512); @@ -342,22 +345,24 @@ std::string LidarScanStreamMeta::repr() const { // ============== LidarScan Stream ops =========================== -LidarScanStream::LidarScanStream(Writer& writer, const uint32_t sensor_meta_id, +LidarScanStream::LidarScanStream(Token /*key*/, Writer& writer, + const uint32_t sensor_meta_id, const LidarScanFieldTypes& field_types) : writer_{writer}, meta_(sensor_meta_id, field_types), sensor_meta_id_(sensor_meta_id) { + // Note key is ignored and just used to gatekeep. 
// Check sensor and get sensor_info - auto sensor_meta_entry = writer.getMetadata(sensor_meta_id_); + auto sensor_meta_entry = writer.get_metadata(sensor_meta_id_); if (sensor_meta_entry == nullptr) { - std::cerr << "ERROR: can't find sensor_meta_id = " << sensor_meta_id - << std::endl; - std::abort(); + std::stringstream ss; + ss << "ERROR: can't find sensor_meta_id = " << sensor_meta_id; + throw std::logic_error(ss.str()); } sensor_info_ = sensor_meta_entry->info(); - stream_meta_id_ = writer_.addMetadata(meta_); + stream_meta_id_ = writer_.add_metadata(meta_); } // TODO[pb]: Every save func in Streams is uniform, need to nicely extract @@ -365,7 +370,7 @@ LidarScanStream::LidarScanStream(Writer& writer, const uint32_t sensor_meta_id, void LidarScanStream::save(const ouster::osf::ts_t ts, const LidarScan& lidar_scan) { const auto& msg_buf = make_msg(lidar_scan); - writer_.saveMessage(meta_.id(), ts, msg_buf); + writer_.save_message(meta_.id(), ts, msg_buf); } std::vector LidarScanStream::make_msg(const LidarScan& lidar_scan) { @@ -382,9 +387,7 @@ std::unique_ptr LidarScanStream::decode_msg( const std::vector& buf, const LidarScanStream::meta_type& meta, const MetadataStore& meta_provider) { auto sensor = meta_provider.get(meta.sensor_meta_id()); - auto info = sensor->info(); - return restore_lidar_scan(buf, info); } diff --git a/ouster_osf/src/writer.cpp b/ouster_osf/src/writer.cpp index 1b821e44..4f6e5934 100644 --- a/ouster_osf/src/writer.cpp +++ b/ouster_osf/src/writer.cpp @@ -5,23 +5,22 @@ #include "ouster/osf/writer.h" +#include + #include "fb_utils.h" #include "ouster/osf/basics.h" #include "ouster/osf/crc32.h" -#include "ouster/osf/layout_standard.h" #include "ouster/osf/layout_streaming.h" +#include "ouster/osf/stream_lidar_scan.h" constexpr size_t MAX_CHUNK_SIZE = 500 * 1024 * 1024; namespace ouster { namespace osf { -Writer::Writer(const std::string& filename) : Writer(filename, std::string{}) {} - -Writer::Writer(const std::string& filename, const std::string& metadata_id, - uint32_t chunk_size) +Writer::Writer(const std::string& filename, uint32_t chunk_size) : file_name_(filename), - metadata_id_{metadata_id}, + metadata_id_{"ouster_sdk"}, chunks_layout_{ChunksLayout::LAYOUT_STREAMING} { // chunks STREAMING_LAYOUT chunks_writer_ = std::make_shared(*this, chunk_size); @@ -37,19 +36,115 @@ Writer::Writer(const std::string& filename, const std::string& metadata_id, if (header_size_ > 0) { pos_ = static_cast(header_size_); } else { - std::cerr << "ERROR: Can't write to file :(\n"; - std::abort(); + throw std::runtime_error("ERROR: Can't write to file :("); + } +} + +Writer::Writer(const std::string& filename, + const ouster::sensor::sensor_info& info, + const LidarScanFieldTypes& field_types, uint32_t chunk_size) + : Writer(filename, std::vector{info}, + field_types, chunk_size) {} + +Writer::Writer(const std::string& filename, + const std::vector& info, + const LidarScanFieldTypes& field_types, uint32_t chunk_size) + : Writer(filename, chunk_size) { + sensor_info_ = info; + for (uint32_t i = 0; i < info.size(); i++) { + lidar_meta_id_[i] = add_metadata(ouster::osf::LidarSensor(info[i])); + field_types_.push_back(field_types); + } +} + +const std::vector& Writer::sensor_info() const { + return sensor_info_; +} + +const ouster::sensor::sensor_info Writer::sensor_info(int stream_index) const { + return sensor_info_[stream_index]; +} + +uint32_t Writer::sensor_info_count() const { return sensor_info_.size(); } + +uint32_t Writer::add_sensor(const ouster::sensor::sensor_info& info, 
+ const LidarScanFieldTypes& field_types) { + lidar_meta_id_[lidar_meta_id_.size()] = + add_metadata(ouster::osf::LidarSensor(info)); + field_types_.push_back(field_types); + sensor_info_.push_back(info); + return lidar_meta_id_.size() - 1; +} + +void Writer::_save(uint32_t stream_index, const LidarScan& scan, + const ts_t time) { + if (stream_index < lidar_meta_id_.size()) { + auto item = lidar_streams_.find(stream_index); + if (item == lidar_streams_.end()) { + const auto& fields = field_types_[stream_index].size() + ? field_types_[stream_index] + : get_field_types(scan); + lidar_streams_[stream_index] = + std::make_unique( + LidarScanStream::Token(), *this, + lidar_meta_id_[stream_index], fields); + } + lidar_streams_[stream_index]->save(time, scan); + } else { + throw std::logic_error("ERROR: Bad Stream ID"); + } +} + +void Writer::save(uint32_t stream_index, const LidarScan& scan) { + if (is_closed()) { + throw std::logic_error("ERROR: Writer is closed"); + } + ts_t time = ts_t(scan.get_first_valid_packet_timestamp()); + _save(stream_index, scan, time); +} + +void Writer::save(uint32_t stream_index, const LidarScan& scan, const ts_t ts) { + if (is_closed()) { + throw std::logic_error("ERROR: Writer is closed"); + } + _save(stream_index, scan, ts); +} + +void Writer::save(const std::vector& scans) { + if (is_closed()) { + throw std::logic_error("ERROR: Writer is closed"); + } + if (scans.size() != lidar_meta_id_.size()) { + throw std::logic_error( + "ERROR: Scans passed in to writer " + "does not match number of sensor infos"); + } else { + for (uint32_t i = 0; i < scans.size(); i++) { + ts_t time = ts_t(scans[i].get_first_valid_packet_timestamp()); + _save(i, scans[i], time); + } } } +uint32_t Writer::add_metadata(MetadataEntry&& entry) { + return add_metadata(entry); +} + +uint32_t Writer::add_metadata(MetadataEntry& entry) { + return meta_store_.add(entry); +} + +std::shared_ptr Writer::get_metadata( + const uint32_t metadata_id) const { + return meta_store_.get(metadata_id); +} + uint64_t Writer::append(const uint8_t* buf, const uint64_t size) { if (pos_ < 0) { - std::cerr << "ERROR: Writer is not ready (not started?)\n"; - std::abort(); + throw std::logic_error("ERROR: Writer is not ready (not started?)"); } if (finished_) { - std::cerr << "ERROR: Hmm, Writer is finished. 
\n"; - std::abort(); + throw std::logic_error("ERROR: Hmm, Writer is finished."); } if (size == 0) { std::cout << "nothing to append!!!\n"; @@ -62,18 +157,31 @@ uint64_t Writer::append(const uint8_t* buf, const uint64_t size) { // > > > ===================== Chunk Emiter operations ====================== -void Writer::saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& msg_buf) { +void Writer::save_message(const uint32_t stream_id, const ts_t ts, + const std::vector& msg_buf) { if (!meta_store_.get(stream_id)) { - std::cerr << "ERROR: Attempt to save the non existent stream: id = " - << stream_id << std::endl; - std::abort(); + std::stringstream ss; + ss << "ERROR: Attempt to save the non existent stream: id = " + << stream_id << std::endl; + + throw std::logic_error(ss.str()); + return; } - chunks_writer_->saveMessage(stream_id, ts, msg_buf); + chunks_writer_->save_message(stream_id, ts, msg_buf); } +const MetadataStore& Writer::meta_store() const { return meta_store_; } + +const std::string& Writer::metadata_id() const { return metadata_id_; } + +void Writer::set_metadata_id(const std::string& id) { metadata_id_ = id; } + +const std::string& Writer::filename() const { return file_name_; } + +ChunksLayout Writer::chunks_layout() const { return chunks_layout_; } + uint64_t Writer::emit_chunk(const ts_t chunk_start_ts, const ts_t chunk_end_ts, const std::vector& chunk_buf) { uint64_t saved_bytes = append(chunk_buf.data(), chunk_buf.size()); @@ -87,9 +195,10 @@ uint64_t Writer::emit_chunk(const ts_t chunk_start_ts, const ts_t chunk_end_ts, next_chunk_offset_ += saved_bytes; started_ = true; } else { - std::cerr << "ERROR: Can't save to file. saved_bytes = " << saved_bytes - << std::endl; - std::abort(); + std::stringstream ss; + ss << "ERROR: Can't save to file. saved_bytes = " << saved_bytes + << std::endl; + throw std::logic_error(ss.str()); } return res_chunk_offset; } @@ -113,6 +222,7 @@ std::vector Writer::make_metadata() const { const uint8_t* buf = metadata_fbb.GetBufferPointer(); uint32_t size = metadata_fbb.GetSize(); + // Construct the std::vector from the start/end pointers. return {buf, buf + size}; } @@ -137,31 +247,26 @@ void Writer::close() { header_size_) { finished_ = true; } else { - std::cerr << "ERROR: Can't finish OSF file! Recorded header of " + std::cerr << "ERROR: Can't finish OSF file!" + "Recorded header of " "different sizes ..." << std::endl; - std::abort(); } } else { - std::cerr << "ERROR: Oh, why we are here and didn't finish correctly?" + std::cerr << "ERROR: Oh, why we are here and " + "didn't finish correctly?" << std::endl; - std::abort(); } } -uint32_t Writer::chunk_size() const { - if (chunks_writer_) { - return static_cast(chunks_writer_->chunk_size()); - } - return 0; -} +uint32_t Writer::chunk_size() const { return chunks_writer_->chunk_size(); } Writer::~Writer() { close(); } // ================================================================ -void ChunkBuilder::saveMessage(const uint32_t stream_id, const ts_t ts, - const std::vector& msg_buf) { +void ChunkBuilder::save_message(const uint32_t stream_id, const ts_t ts, + const std::vector& msg_buf) { if (finished_) { std::cerr << "ERROR: ChunkBuilder is finished and can't accept new messages!" 
@@ -170,9 +275,9 @@ void ChunkBuilder::saveMessage(const uint32_t stream_id, const ts_t ts, } if (fbb_.GetSize() + msg_buf.size() > MAX_CHUNK_SIZE) { - std::cerr << "ERROR: reached max possible chunk size MAX_SIZE" - << std::endl; - std::abort(); + throw std::logic_error( + "ERROR: reached max possible" + " chunk size MAX_SIZE"); } update_start_end(ts); @@ -197,6 +302,10 @@ uint32_t ChunkBuilder::messages_count() const { return static_cast(messages_.size()); } +ts_t ChunkBuilder::start_ts() const { return start_ts_; } + +ts_t ChunkBuilder::end_ts() const { return end_ts_; } + void ChunkBuilder::update_start_end(const ts_t ts) { if (start_ts_ > ts) start_ts_ = ts; if (end_ts_ < ts) end_ts_ = ts; @@ -223,4 +332,4 @@ std::vector ChunkBuilder::finish() { // ================================================================ } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/tests/CMakeLists.txt b/ouster_osf/tests/CMakeLists.txt index d6a470b4..156e2304 100644 --- a/ouster_osf/tests/CMakeLists.txt +++ b/ouster_osf/tests/CMakeLists.txt @@ -1,10 +1,12 @@ cmake_minimum_required(VERSION 3.1.0) find_package(GTest REQUIRED) +find_package(OpenSSL REQUIRED) # Each test file should be in a format "_test.cpp" set(OSF_TESTS_SOURCES png_tools_test.cpp writer_test.cpp + writerv2_test.cpp writer_custom_test.cpp file_test.cpp crc_test.cpp @@ -12,6 +14,8 @@ set(OSF_TESTS_SOURCES png_tools_test.cpp reader_test.cpp operations_test.cpp pcap_source_test.cpp + basics_test.cpp + meta_streaming_info_test.cpp ) message(STATUS "OSF: adding testing .... ") @@ -23,9 +27,13 @@ foreach(TEST_FULL_NAME ${OSF_TESTS_SOURCES}) add_executable(osf_${TEST_FILENAME} ${TEST_FULL_NAME}) set_target_properties(osf_${TEST_FILENAME} PROPERTIES RUNTIME_OUTPUT_DIRECTORY "${CMAKE_BINARY_DIR}/tests") + target_include_directories(osf_${TEST_FILENAME} PRIVATE ${CMAKE_CURRENT_LIST_DIR}/../src) - target_link_libraries(osf_${TEST_FILENAME} ouster_osf - GTest::gtest GTest::gtest_main) + target_link_libraries(osf_${TEST_FILENAME} PRIVATE ouster_osf + GTest::gtest + GTest::gtest_main + OpenSSL::Crypto) + CodeCoverageFunctionality(osf_${TEST_FILENAME}) add_test(NAME osf_${TEST_FILENAME} COMMAND osf_${TEST_FILENAME} --gtest_output=xml:osf_${TEST_FILENAME}.xml) set_tests_properties( @@ -34,4 +42,4 @@ foreach(TEST_FULL_NAME ${OSF_TESTS_SOURCES}) ENVIRONMENT DATA_DIR=${CMAKE_CURRENT_LIST_DIR}/../../tests/ ) -endforeach() \ No newline at end of file +endforeach() diff --git a/ouster_osf/tests/basics_test.cpp b/ouster_osf/tests/basics_test.cpp new file mode 100644 index 00000000..1b3e2709 --- /dev/null +++ b/ouster_osf/tests/basics_test.cpp @@ -0,0 +1,30 @@ +#include "ouster/osf/basics.h" + +#include + +#include + +#include "fb_utils.h" +#include "osf_test.h" + +namespace ouster { +namespace osf { +namespace { + +class BasicsTest : public OsfTestWithData {}; + +TEST_F(BasicsTest, GetBlockSizeTest) { + const std::string test_file_name = + path_concat(test_data_dir(), "osfs/OS-1-128_v2.3.0_1024x10_lb_n3.osf"); + auto size = ouster::osf::file_size(test_file_name); + char* buf = new char[size]; + std::fstream file_stream; + file_stream.open(test_file_name, std::fstream::in | std::fstream::binary); + file_stream.read(buf, size); + EXPECT_EQ(ouster::osf::get_block_size((uint8_t*)buf), 60); + delete[] buf; +} + +} // namespace +} // namespace osf +} // namespace ouster diff --git a/ouster_osf/tests/common.h b/ouster_osf/tests/common.h index 06b11e6c..78546c12 100644 --- a/ouster_osf/tests/common.h +++ 
b/ouster_osf/tests/common.h @@ -28,6 +28,7 @@ constexpr char OSF_OUTPUT_DIR[] = "test_osf"; using idx = std::ptrdiff_t; +/// @todo move this stuff into a cpp file inline bool get_test_data_dir(std::string& test_data_dir) { std::string test_data_dir_var; if (get_env_var("DATA_DIR", test_data_dir_var)) { diff --git a/ouster_osf/tests/crc_test.cpp b/ouster_osf/tests/crc_test.cpp index 127efd41..aebedcd2 100644 --- a/ouster_osf/tests/crc_test.cpp +++ b/ouster_osf/tests/crc_test.cpp @@ -29,6 +29,11 @@ TEST_F(CrcTest, SmokeSanityCheck) { EXPECT_EQ(0xa1509ef8, crc_rev); } +TEST_F(CrcTest, SmokeSanityCheckAltInit) { + const std::vector data = {0, 1, 2, 3, 4, 5, 6, 7}; + const uint32_t crc = osf::crc32(0L, data.data(), data.size()); + EXPECT_EQ(0x88aa689f, crc); +} } // namespace } // namespace osf } // namespace ouster diff --git a/ouster_osf/tests/file_ops_test.cpp b/ouster_osf/tests/file_ops_test.cpp index 043dbc7b..adfc4007 100644 --- a/ouster_osf/tests/file_ops_test.cpp +++ b/ouster_osf/tests/file_ops_test.cpp @@ -131,6 +131,140 @@ TEST_F(FileOpsTest, TestFileMapping) { EXPECT_TRUE(mmap_close(file_buf, fsize)); } +TEST_F(FileOpsTest, TruncateFile) { + const int fsize = 1000; + const int trunc_size = 450; + + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "test_file"); + + std::fstream test_file_out; + test_file_out.open(temp_file, std::fstream::out | std::fstream::trunc | + std::fstream::binary); + for (int i = 0; i < fsize; i++) { + test_file_out << (uint8_t)i; + } + test_file_out.close(); + + EXPECT_EQ(file_size(temp_file), fsize); + truncate_file(temp_file, trunc_size); + EXPECT_EQ(file_size(temp_file), trunc_size); + unlink_path(temp_file); + remove_dir(temp_dir); +} + +TEST_F(FileOpsTest, AppendBinaryFileBlank) { + const int fsize = 10; + + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "test_file"); + std::string temp_file2 = path_concat(temp_dir, "test_file2"); + + std::fstream test_file; + test_file.open(temp_file2, std::fstream::out | std::fstream::trunc | + std::fstream::binary); + for (int i = 0; i < fsize; i++) { + test_file << (uint8_t)i; + } + test_file.close(); + + EXPECT_EQ(file_size(temp_file2), fsize); + EXPECT_EQ(append_binary_file(temp_file, temp_file2), fsize); + EXPECT_EQ(file_size(temp_file), fsize); + + test_file.open(temp_file2, std::fstream::in | std::fstream::binary); + for (int i = 0; i < fsize; i++) { + char temp; + test_file.read(&temp, 1); + EXPECT_EQ(temp, (char)i); + } + test_file.close(); + + unlink_path(temp_file); + unlink_path(temp_file2); + remove_dir(temp_dir); +} + +TEST_F(FileOpsTest, AppendBinaryFile) { + const int fsize1 = 100; + const int fsize2 = 50; + + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "test_file"); + std::string temp_file2 = path_concat(temp_dir, "test_file2"); + + std::fstream test_file; + test_file.open(temp_file, std::fstream::out | std::fstream::trunc | + std::fstream::binary); + for (int i = 0; i < fsize1; i++) { + test_file << (uint8_t)i; + } + test_file.close(); + + test_file.open(temp_file2, std::fstream::out | std::fstream::trunc | + std::fstream::binary); + for (int i = 0; i < fsize2; i++) { + test_file << (uint8_t)(i + fsize1); + } + test_file.close(); + + EXPECT_EQ(file_size(temp_file), fsize1); + EXPECT_EQ(file_size(temp_file2), fsize2); + EXPECT_EQ(append_binary_file(temp_file, temp_file2), (fsize1 + fsize2)); + 
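+    // append_binary_file() returns the new total size of the destination
+    // file (its first argument), verified against the on-disk size below.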
EXPECT_EQ(file_size(temp_file), (fsize1 + fsize2)); + + test_file.open(temp_file, std::fstream::in | std::fstream::binary); + for (int i = 0; i < (fsize1 + fsize2); i++) { + char temp; + test_file.read(&temp, 1); + EXPECT_EQ(temp, (char)i); + } + test_file.close(); + + unlink_path(temp_file); + unlink_path(temp_file2); + remove_dir(temp_dir); +} + +TEST_F(FileOpsTest, CopyTrailingBytes) { + const int fsize1 = 200; + const int offset = 150; + + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "test_file"); + std::string temp_file2 = path_concat(temp_dir, "test_file2"); + + std::fstream test_file; + test_file.open(temp_file, std::fstream::out | std::fstream::trunc | + std::fstream::binary); + for (int i = 0; i < fsize1; i++) { + test_file << (uint8_t)i; + } + test_file.close(); + + EXPECT_EQ(file_size(temp_file), fsize1); + EXPECT_EQ(copy_file_trailing_bytes(temp_file, temp_file2, offset), + (fsize1 - offset)); + EXPECT_EQ(file_size(temp_file2), (fsize1 - offset)); + + std::fstream test_file2; + test_file2.open(temp_file2, std::fstream::in | std::fstream::binary); + for (int i = 0; i < (fsize1 - offset); i++) { + char temp; + test_file2.read(&temp, 1); + EXPECT_EQ(temp, (char)(i + offset)); + } + test_file2.close(); + + unlink_path(temp_file); + unlink_path(temp_file2); + remove_dir(temp_dir); +} + } // namespace } // namespace osf } // namespace ouster diff --git a/ouster_osf/tests/file_test.cpp b/ouster_osf/tests/file_test.cpp index 0ee061d0..1f052c3e 100644 --- a/ouster_osf/tests/file_test.cpp +++ b/ouster_osf/tests/file_test.cpp @@ -167,4 +167,4 @@ TEST_F(OsfFileTest, OsfFileCheckOutOfRangeAccess) { } // namespace } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/tests/meta_streaming_info_test.cpp b/ouster_osf/tests/meta_streaming_info_test.cpp new file mode 100644 index 00000000..4d879f7a --- /dev/null +++ b/ouster_osf/tests/meta_streaming_info_test.cpp @@ -0,0 +1,32 @@ +#include "ouster/osf/meta_streaming_info.h" + +#include + +#include + +#include "fb_utils.h" +#include "osf_test.h" +#include "ouster/osf/basics.h" + +namespace ouster { +namespace osf { +namespace { + +class MetaStreamingInfoTests : public OsfTestWithData {}; + +/// @todo move this to a better place +TEST_F(MetaStreamingInfoTests, StreamingPrintTests) { + ChunkInfo data = {1, 2, 3}; + EXPECT_EQ(to_string(data), + "{offset = 1, stream_id = 2, message_count = 3}"); + + ts_t t(5678L); + StreamStats data2(4, t, 6); + EXPECT_EQ(to_string(data2), + "{stream_id = 4, start_ts = 5678, end_ts = 5678," + " message_count = 1, message_avg_size = 6}"); +} + +} // namespace +} // namespace osf +} // namespace ouster diff --git a/ouster_osf/tests/metadata_tests.cpp b/ouster_osf/tests/metadata_tests.cpp new file mode 100644 index 00000000..f3275aaf --- /dev/null +++ b/ouster_osf/tests/metadata_tests.cpp @@ -0,0 +1,61 @@ +#include + +#include + +#include "fb_utils.h" +#include "osf_test.h" +#include "ouster/osf/basics.h" +#include "ouster/osf/meta_lidar_sensor.h" +#include "ouster/osf/reader.h" +#include "ouster/osf/stream_lidar_scan.h" + +namespace ouster { +namespace osf { +namespace { + +class MetadataTest : public OsfTestWithData {}; + +TEST_F(MetadataTest, TestDupeMetadata) { + MetadataStore meta_store_ = {}; + LidarScanStreamMeta data(1011121314, {}); + EXPECT_EQ(meta_store_.add(data), 1); + + std::stringstream output_stream; + std::streambuf* old_output_stream = std::cout.rdbuf(); + 
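+    // Redirect std::cout so the duplicate-entry warning printed by
+    // MetadataStore::add() can be captured and checked below.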
std::cout.rdbuf(output_stream.rdbuf()); + EXPECT_EQ(meta_store_.add(data), 1); + std::cout.rdbuf(old_output_stream); + EXPECT_EQ(output_stream.str(), + "WARNING: MetadataStore:" + " ENTRY EXISTS! id = 1\n"); +} + +class MetadataTestApi : public ouster::osf::MetadataEntry { + public: + MetadataTestApi(std::string type, std::string static_type, + std::vector buffer) + : _type(type), _static_type(static_type), _buffer(buffer){}; + std::vector buffer() const { return _buffer; }; + std::unique_ptr clone() const { return nullptr; }; + std::string type() const { return _type; }; + std::string static_type() const { return _static_type; }; + + private: + std::string _type; + std::string _static_type; + std::vector _buffer; +}; + +TEST_F(MetadataTest, MiscMetadataEntryTests) { + MetadataTestApi test("Screams And Whispers - Dance With the Dead", + "Good song", {1, 2, 3, 4, 5}); + EXPECT_EQ(test.repr(), "MetadataEntry: 01 02 03 04 05"); + EXPECT_EQ(test.to_string(), + "MetadataEntry: [id = 0, type = Screams And Whispers - Dance " + "With the Dead," + " buffer = {MetadataEntry: 01 02 03 04 05}]"); +} + +} // namespace +} // namespace osf +} // namespace ouster diff --git a/ouster_osf/tests/operations_test.cpp b/ouster_osf/tests/operations_test.cpp index fbc386b2..c17846c0 100644 --- a/ouster_osf/tests/operations_test.cpp +++ b/ouster_osf/tests/operations_test.cpp @@ -6,10 +6,19 @@ #include "ouster/osf/operations.h" #include +#include +#include +#include +#include +#include + +#include "fb_utils.h" +#include "json/json.h" #include "json_utils.h" #include "osf_test.h" #include "ouster/osf/basics.h" +#include "ouster/osf/crc32.h" #include "ouster/osf/file.h" #include "ouster/osf/meta_lidar_sensor.h" #include "ouster/osf/reader.h" @@ -19,6 +28,83 @@ namespace ouster { namespace osf { namespace { +// For some reason windows doesnt like the block_size +// init in the code below +#define BLOCK_SIZE (1024 * 1024) +#define FILESHA_DIGEST_SIZE 64 +class FileSha { + public: + FileSha(const std::string& filename) + : context(EVP_MD_CTX_new()), + block_size(BLOCK_SIZE), + fsize(ouster::osf::file_size(filename)), + digest{0}, + digest_size(FILESHA_DIGEST_SIZE) { + // Using NULL for the openssl C api + if (context == NULL) handleEvpError(); + if (EVP_DigestInit_ex(context, EVP_sha512(), NULL) != 1) { + handleEvpError(); + } + + char buf[BLOCK_SIZE]; + uint64_t i = ouster::osf::file_size(filename); + bool finished = false; + + std::fstream reader; + reader.open(filename, std::fstream::in | std::fstream::binary); + + while (i > 0 && !finished) { + uint64_t size = block_size; + if (i < block_size) { + size = i; + finished = true; + } + reader.read(buf, size); + if (EVP_DigestUpdate(context, buf, size) != 1) { + handleEvpError(); + } + i -= block_size; + } + if (EVP_DigestFinal_ex(context, digest, &digest_size) != 1) { + handleEvpError(); + } + EVP_MD_CTX_free(context); + } + + std::string get_string() { + std::stringstream result; + char buf[3]; + result << "0x"; + for (uint64_t i = 0; i < digest_size; i++) { + // std::hex was misbehaving, just use C + snprintf(buf, 3, "%02x", digest[i]); + result << std::string(buf); + } + return result.str(); + } + + protected: + void handleEvpError() { + const size_t buflen = 100; + char buf[buflen]; + unsigned long errorno; + std::stringstream sstream; + + sstream << "FileSha Sha Errors:" << std::endl; + while ((errorno = ERR_get_error()) != 0) { + ERR_error_string_n(errorno, buf, buflen); + sstream << buf << std::endl; + } + throw sstream.str(); + } + + EVP_MD_CTX* context; + const 
uint64_t block_size; + int64_t fsize; + unsigned char digest[FILESHA_DIGEST_SIZE]; + unsigned int digest_size; +}; + class OperationsTest : public OsfTestWithDataAndFiles {}; TEST_F(OperationsTest, GetOsfDumpInfo) { @@ -51,31 +137,254 @@ TEST_F(OperationsTest, ParseAndPrintSmoke) { path_concat(test_data_dir(), "osfs/OS-1-128_v2.3.0_1024x10_lb_n3.osf")); } -// TODO[pb]: Remove this test and remove PcapRawSource since it's not mathing -// the python impl. -TEST_F(OperationsTest, PcapToOsf) { - std::string pcap_file = path_concat( - test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10_lb_n3.pcap"); - std::string meta_file = - path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json"); +TEST_F(OperationsTest, FileShaTest) { + std::fstream test_file_out; + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "test_file"); + test_file_out.open(temp_file, std::fstream::out | std::fstream::trunc); + test_file_out << "Testing here for hashing" << std::endl; + test_file_out.close(); + auto sha = FileSha(temp_file); + EXPECT_EQ( + sha.get_string(), + "0x568c47f13b8a96ab5027037c0a44450fd493e91ba92a95bd1f81e23604d8dd99e687" + "6d5bbdf3d5b05ec7b9d03e84fd678690e57a1ecbc40863637deab9a35253"); + + unlink_path(temp_file); + remove_dir(temp_dir); +} + +TEST_F(OperationsTest, BackupMetadataTest) { + std::string osf_file_path = + path_concat(test_data_dir(), "osfs/OS-1-128_v2.3.0_1024x10_lb_n3.osf"); + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + try { + std::string temp_file = path_concat(temp_dir, "temp.osf"); + EXPECT_EQ(append_binary_file(temp_file, osf_file_path), + file_size(osf_file_path)); + auto size1 = file_size(temp_file); + auto sha1 = FileSha(temp_file).get_string(); + std::string temp_backup = path_concat(temp_dir, "temp_backup"); + auto size2 = backup_osf_file_metablob(temp_file, temp_backup); + truncate_file(temp_file, size1 - size2); + auto sha2 = FileSha(temp_file).get_string(); + std::fstream bad_append_out; + bad_append_out.open(temp_file, std::fstream::out | std::fstream::app); + bad_append_out << "Testing here for hashing" << std::endl; + bad_append_out.close(); + auto sha3 = FileSha(temp_file).get_string(); + auto size3 = restore_osf_file_metablob(temp_file, temp_backup); + auto sha4 = FileSha(temp_file).get_string(); + + EXPECT_NE(size1, size2); + EXPECT_EQ(size1, size3); + EXPECT_EQ(sha1, sha4); + EXPECT_NE(sha1, sha2); + EXPECT_NE(sha1, sha3); + EXPECT_NE(sha2, sha3); + unlink_path(temp_file); + unlink_path(temp_backup); + } catch (...) 
{ + remove_dir(temp_dir); + throw; + } + remove_dir(temp_dir); +} + +bool _parse_json(const std::string& json, Json::Value& root) { + Json::CharReaderBuilder build; + JSONCPP_STRING error; + const std::unique_ptr read(build.newCharReader()); + return read->parse(json.c_str(), (json.c_str() + json.length()), &root, + &error); +} - std::string output_osf_filename = tmp_file("pcap_to_osf_test.osf"); +ouster::sensor::sensor_info _gen_new_metadata(int start_number) { + ouster::sensor::sensor_info new_metadata; + new_metadata.name = "Foobar"; + new_metadata.sn = "DEADBEEF"; + new_metadata.fw_rev = "sqrt(-1) friends"; + new_metadata.mode = ouster::sensor::MODE_512x10; + new_metadata.prod_line = "LEEROY JENKINS"; - bool res = pcap_to_osf(pcap_file, meta_file, 7502, output_osf_filename); + new_metadata.format.pixels_per_column = 5; + new_metadata.format.columns_per_packet = 2 + start_number; + new_metadata.format.columns_per_frame = 3 + start_number; + new_metadata.format.pixel_shift_by_row = { + 4 + start_number, 5 + start_number, 6 + start_number, 7 + start_number, + 8 + start_number}; + new_metadata.format.column_window = {9 + start_number, 10 + start_number}; + new_metadata.format.udp_profile_lidar = + ouster::sensor::PROFILE_RNG15_RFL8_NIR8; + new_metadata.format.udp_profile_imu = ouster::sensor::PROFILE_IMU_LEGACY; + new_metadata.format.fps = 11 + start_number; + new_metadata.beam_azimuth_angles = { + 12. + (double)start_number, 13. + (double)start_number, + 14. + (double)start_number, 15. + (double)start_number, + 16. + (double)start_number}; + new_metadata.beam_altitude_angles = { + 17. + (double)start_number, 18. + (double)start_number, + 19. + (double)start_number, 20. + (double)start_number, + 21. + (double)start_number}; + new_metadata.lidar_origin_to_beam_origin_mm = 22 + start_number; - EXPECT_TRUE(res); + new_metadata.init_id = 23 + start_number; + new_metadata.udp_port_lidar = 24 + start_number; + new_metadata.udp_port_imu = 25 + start_number; - OsfFile output_osf_file{output_osf_filename}; - EXPECT_TRUE(output_osf_file.valid()); + new_metadata.build_date = "Made in SAN FRANCISCO"; + new_metadata.image_rev = "IDK, ask someone else"; + new_metadata.prod_pn = "import random; print(random.random())"; + new_metadata.status = "Not just good but great"; - Reader reader{output_osf_file}; + return new_metadata; +} - auto msgs_count = - std::distance(reader.messages().begin(), reader.messages().end()); - EXPECT_EQ(2, msgs_count); +void _verify_empty_metadata(Json::Value& test_root, int entry_count = 0) { + EXPECT_EQ(test_root["metadata"]["chunks"].size(), 0); + EXPECT_EQ(test_root["metadata"]["entries"].size(), entry_count); + EXPECT_EQ(test_root["metadata"]["end_ts"], 0); + EXPECT_EQ(test_root["metadata"]["start_ts"], 0); + EXPECT_EQ(test_root["metadata"]["id"], ""); } -} // namespace +void _write_init_metadata(std::string& temp_file, uint64_t header_size, + MetadataStore meta_store_ = {}) { + // Copied and modified from writer.cpp under osf/src + flatbuffers::FlatBufferBuilder metadata_fbb = + flatbuffers::FlatBufferBuilder(32768); + + std::vector chunks_{}; + + std::vector> entries = + meta_store_.make_entries(metadata_fbb); + char id[4] = {0}; + auto metadata = ouster::osf::gen::CreateMetadataDirect( + metadata_fbb, id, 0, 0, &chunks_, &entries); + + metadata_fbb.FinishSizePrefixed(metadata, + ouster::osf::gen::MetadataIdentifier()); + + const uint8_t* buf = metadata_fbb.GetBufferPointer(); + uint32_t metadata_size = metadata_fbb.GetSize(); + + uint64_t metadata_offset = header_size; + 
uint64_t metadata_saved_size = + buffer_to_file(buf, metadata_size, temp_file, true); + EXPECT_TRUE(metadata_saved_size && + metadata_saved_size == metadata_size + CRC_BYTES_SIZE); + EXPECT_TRUE(finish_osf_file(temp_file, metadata_offset, + metadata_saved_size) == header_size); +} + +TEST_F(OperationsTest, MetadataRewriteTestSimple) { + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "temp.osf"); + uint64_t header_size = start_osf_file(temp_file); + + _write_init_metadata(temp_file, header_size); + std::string metadata_json = dump_metadata(temp_file, true); + Json::Value test_root{}; + EXPECT_TRUE(_parse_json(metadata_json, test_root)); + + _verify_empty_metadata(test_root); + + ouster::sensor::sensor_info new_metadata = _gen_new_metadata(100); + + osf_file_modify_metadata(temp_file, {new_metadata}); + std::string output_metadata_json = dump_metadata(temp_file, true); + Json::Value output_root{}; + EXPECT_TRUE(_parse_json(output_metadata_json, output_root)); + EXPECT_NE(test_root, output_root); + + Json::Value new_root{}; + EXPECT_TRUE(_parse_json(new_metadata.updated_metadata_string(), new_root)); + + EXPECT_EQ(new_root, + output_root["metadata"]["entries"][0]["buffer"]["sensor_info"]); + unlink_path(temp_file); +} + +TEST_F(OperationsTest, MetadataRewriteTestMulti) { + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "temp.osf"); + uint64_t header_size = start_osf_file(temp_file); + + _write_init_metadata(temp_file, header_size); + + std::string metadata_json = dump_metadata(temp_file, true); + Json::Value test_root{}; + EXPECT_TRUE(_parse_json(metadata_json, test_root)); + + _verify_empty_metadata(test_root); + + ouster::sensor::sensor_info new_metadata = _gen_new_metadata(100); + ouster::sensor::sensor_info new_metadata2 = _gen_new_metadata(200); + + osf_file_modify_metadata(temp_file, {new_metadata, new_metadata2}); + std::string output_metadata_json = dump_metadata(temp_file, true); + Json::Value output_root{}; + EXPECT_TRUE(_parse_json(output_metadata_json, output_root)); + EXPECT_NE(test_root, output_root); + + Json::Value new_root{}; + EXPECT_TRUE(_parse_json(new_metadata.updated_metadata_string(), new_root)); + Json::Value new_root2{}; + auto temp_string = new_metadata2.updated_metadata_string(); + EXPECT_TRUE(_parse_json(temp_string, new_root2)); + + EXPECT_EQ(new_root, + output_root["metadata"]["entries"][0]["buffer"]["sensor_info"]); + EXPECT_EQ(new_root2, + output_root["metadata"]["entries"][1]["buffer"]["sensor_info"]); + unlink_path(temp_file); +} + +TEST_F(OperationsTest, MetadataRewriteTestPreExisting) { + std::string temp_dir; + EXPECT_TRUE(make_tmp_dir(temp_dir)); + std::string temp_file = path_concat(temp_dir, "temp.osf"); + uint64_t header_size = start_osf_file(temp_file); + MetadataStore meta_store_ = {}; + LidarScanStreamMeta pre_existing_data(12345678, {}); + meta_store_.add(pre_existing_data); + _write_init_metadata(temp_file, header_size, meta_store_); + + std::string metadata_json = dump_metadata(temp_file, true); + Json::Value test_root{}; + EXPECT_TRUE(_parse_json(metadata_json, test_root)); + + _verify_empty_metadata(test_root, 1); + + EXPECT_EQ(test_root["metadata"]["entries"][0]["type"], + "ouster/v1/os_sensor/LidarScanStream"); + EXPECT_EQ(test_root["metadata"]["entries"][0]["buffer"], + "LidarScanStreamMeta: sensor_id = 12345678, field_types = {}"); + + ouster::sensor::sensor_info new_metadata = _gen_new_metadata(100); + + 
osf_file_modify_metadata(temp_file, {new_metadata}); + std::string output_metadata_json = dump_metadata(temp_file, true); + Json::Value output_root{}; + EXPECT_TRUE(_parse_json(output_metadata_json, output_root)); + EXPECT_NE(test_root, output_root); + + Json::Value new_root{}; + EXPECT_TRUE(_parse_json(new_metadata.updated_metadata_string(), new_root)); + + EXPECT_EQ(output_root["metadata"]["entries"][0]["buffer"], + "LidarScanStreamMeta: sensor_id = 12345678, field_types = {}"); + EXPECT_EQ(new_root, + output_root["metadata"]["entries"][1]["buffer"]["sensor_info"]); + EXPECT_EQ(output_root["metadata"]["entries"].size(), 2); + unlink_path(temp_file); +} + +} // namespace } // namespace osf } // namespace ouster diff --git a/ouster_osf/tests/png_tools_test.cpp b/ouster_osf/tests/png_tools_test.cpp index c6ef2d91..30b9b442 100644 --- a/ouster_osf/tests/png_tools_test.cpp +++ b/ouster_osf/tests/png_tools_test.cpp @@ -6,6 +6,7 @@ #include "png_tools.h" #include +#include #include #include @@ -22,6 +23,11 @@ namespace ouster { namespace osf { + +// Internals to test +void png_osf_flush_data(png_structp); +void png_osf_error(png_structp png_ptr, png_const_charp msg); + namespace { class OsfPngToolsTest : public OsfTestWithDataAndFiles {}; @@ -277,6 +283,54 @@ TEST_F(OsfPngToolsTest, ImageCoders) { test64bitImageCoders().to(ls, px_offset, 64)); } +TEST_F(OsfPngToolsTest, InternalsTest) { + // This is unused but is still required, test calling it + // Not expecting any returns + png_structp foo = + png_create_read_struct(PNG_LIBPNG_VER_STRING, NULL, NULL, NULL); + png_osf_flush_data(foo); + + bool error_caught = false; + std::stringstream output_stream; + std::streambuf* old_output_stream = std::cout.rdbuf(); + std::cout.rdbuf(output_stream.rdbuf()); + if (setjmp(png_jmpbuf(foo))) { + error_caught = true; + } else { + png_osf_error( + foo, + "Also Checkout Porcupine Tree - Arriving Somewhere But Not Here"); + } + std::cout.rdbuf(old_output_stream); + EXPECT_TRUE(error_caught); + EXPECT_EQ(output_stream.str(), + "ERROR libpng osf: Also Checkout Porcupine Tree" + " - Arriving Somewhere But Not Here\n"); +} + +#ifndef OUSTER_OSF_NO_THREADING +TEST_F(OsfPngToolsTest, scanDecodeFields) { + // it should propagate the exception + // if destagger throws std::invalid_argument + int w = 32; + int h = 32; + auto scan = ouster::LidarScan(w, h); + LidarScanFieldTypes field_types(scan.begin(), scan.end()); + std::vector shift_by_row; + EXPECT_THROW( + { + try { + scanEncodeFields(scan, shift_by_row, field_types); + } catch (const std::invalid_argument& e) { + ASSERT_STREQ(e.what(), + "image height does not match shifts size"); + throw; + } + }, + std::invalid_argument); +} +#endif + } // namespace } // namespace osf } // namespace ouster diff --git a/ouster_osf/tests/reader_test.cpp b/ouster_osf/tests/reader_test.cpp index ff43ab19..1dbe5428 100644 --- a/ouster_osf/tests/reader_test.cpp +++ b/ouster_osf/tests/reader_test.cpp @@ -10,6 +10,7 @@ #include "common.h" #include "osf_test.h" #include "ouster/osf/meta_lidar_sensor.h" +#include "ouster/osf/meta_streaming_info.h" #include "ouster/osf/stream_lidar_scan.h" namespace ouster { @@ -24,18 +25,119 @@ TEST_F(ReaderTest, Basics) { Reader reader(osf_file); - EXPECT_EQ("from_pcap pythonic", reader.id()); + EXPECT_EQ("from_pcap pythonic", reader.metadata_id()); EXPECT_EQ(991587364520LL, reader.start_ts().count()); EXPECT_EQ(991787323080LL, reader.end_ts().count()); // Get first sensor (it's the first by metadata_id) (i.e. 
first added) auto sensor = reader.meta_store().get(); EXPECT_TRUE(sensor); - + EXPECT_EQ( + sensor->to_string(), + "{\n \"sensor_info\": \n {\n \"base_pn\": \"\",\n \"base_sn\": " + "\"\",\n \"beam_altitude_angles\": \n [\n 20.95,\n " + "20.67,\n 20.36,\n 20.03,\n 19.73,\n 19.41,\n " + "19.11,\n 18.76,\n 18.47,\n 18.14,\n 17.82,\n " + "17.5,\n 17.19,\n 16.86,\n 16.53,\n 16.2,\n " + "15.89,\n 15.56,\n 15.23,\n 14.9,\n 14.57,\n " + "14.23,\n 13.9,\n 13.57,\n 13.25,\n 12.91,\n " + "12.57,\n 12.22,\n 11.9,\n 11.55,\n 11.2,\n " + "10.87,\n 10.54,\n 10.18,\n 9.84,\n 9.51,\n " + "9.15,\n 8.81,\n 8.47,\n 8.11,\n 7.78,\n " + "7.43,\n 7.08,\n 6.74,\n 6.39,\n 6.04,\n " + "5.7,\n 5.34,\n 4.98,\n 4.64,\n 4.29,\n " + "3.93,\n 3.58,\n 3.24,\n 2.88,\n 2.53,\n " + "2.17,\n 1.82,\n 1.47,\n 1.12,\n 0.78,\n " + "0.41,\n 0.07,\n -0.28,\n -0.64,\n -0.99,\n " + "-1.35,\n -1.7,\n -2.07,\n -2.4,\n -2.75,\n " + "-3.11,\n -3.46,\n -3.81,\n -4.15,\n -4.5,\n " + "-4.86,\n -5.22,\n -5.57,\n -5.9,\n -6.27,\n " + "-6.61,\n -6.97,\n -7.3,\n -7.67,\n -8.01,\n " + "-8.35,\n -8.69,\n -9.05,\n -9.38,\n -9.71,\n " + "-10.07,\n -10.42,\n -10.76,\n -11.09,\n -11.43,\n " + " -11.78,\n -12.12,\n -12.46,\n -12.78,\n " + "-13.15,\n -13.46,\n -13.8,\n -14.12,\n -14.48,\n " + " -14.79,\n -15.11,\n -15.46,\n -15.79,\n " + "-16.12,\n -16.45,\n -16.76,\n -17.11,\n -17.44,\n " + " -17.74,\n -18.06,\n -18.39,\n -18.72,\n " + "-19.02,\n -19.32,\n -19.67,\n -19.99,\n -20.27,\n " + " -20.57,\n -20.92,\n -21.22,\n -21.54,\n " + "-21.82\n ],\n \"beam_azimuth_angles\": \n [\n 4.21,\n " + " 1.41,\n -1.4,\n -4.22,\n 4.22,\n 1.41,\n " + "-1.4,\n -4.23,\n 4.21,\n 1.4,\n -1.42,\n " + "-4.2,\n 4.22,\n 1.41,\n -1.4,\n -4.23,\n " + "4.21,\n 1.41,\n -1.41,\n -4.21,\n 4.22,\n " + "1.4,\n -1.41,\n -4.2,\n 4.22,\n 1.42,\n " + "-1.4,\n -4.2,\n 4.22,\n 1.41,\n -1.42,\n " + "-4.21,\n 4.22,\n 1.41,\n -1.4,\n -4.21,\n " + "4.2,\n 1.4,\n -1.4,\n -4.22,\n 4.21,\n " + "1.41,\n -1.41,\n -4.21,\n 4.22,\n 1.41,\n " + "-1.4,\n -4.21,\n 4.21,\n 1.41,\n -1.4,\n " + "-4.21,\n 4.2,\n 1.41,\n -1.4,\n -4.21,\n " + "4.2,\n 1.4,\n -1.41,\n -4.21,\n 4.22,\n " + "1.4,\n -1.4,\n -4.21,\n 4.22,\n 1.42,\n " + "-1.4,\n -4.2,\n 4.2,\n 1.42,\n -1.4,\n " + "-4.22,\n 4.22,\n 1.41,\n -1.4,\n -4.2,\n " + "4.23,\n 1.41,\n -1.4,\n -4.2,\n 4.21,\n " + "1.41,\n -1.4,\n -4.21,\n 4.21,\n 1.41,\n " + "-1.4,\n -4.21,\n 4.22,\n 1.41,\n -1.39,\n " + "-4.21,\n 4.23,\n 1.41,\n -1.39,\n -4.22,\n " + "4.23,\n 1.4,\n -1.4,\n -4.2,\n 4.21,\n " + "1.41,\n -1.41,\n -4.2,\n 4.22,\n 1.42,\n " + "-1.39,\n -4.22,\n 4.24,\n 1.41,\n -1.41,\n " + "-4.22,\n 4.23,\n 1.41,\n -1.39,\n -4.21,\n " + "4.23,\n 1.41,\n -1.39,\n -4.2,\n 4.23,\n " + "1.4,\n -1.39,\n -4.2,\n 4.22,\n 1.42,\n " + "-1.39,\n -4.2\n ],\n \"build_date\": " + "\"2022-04-14T21:11:47Z\",\n \"build_rev\": \"v2.3.0\",\n " + "\"client_version\": \"ouster_client 0.3.0\",\n \"data_format\": \n " + " {\n \"column_window\": \n [\n 0,\n 1023\n " + " ],\n \"columns_per_frame\": 1024,\n " + "\"columns_per_packet\": 16,\n \"pixel_shift_by_row\": \n " + "[\n 24,\n 16,\n 8,\n 0,\n 24,\n " + " 16,\n 8,\n 0,\n 24,\n 16,\n " + "8,\n 0,\n 24,\n 16,\n 8,\n 0,\n " + " 24,\n 16,\n 8,\n 0,\n 24,\n " + "16,\n 8,\n 0,\n 24,\n 16,\n 8,\n " + " 0,\n 24,\n 16,\n 8,\n 0,\n " + "24,\n 16,\n 8,\n 0,\n 24,\n 16,\n " + " 8,\n 0,\n 24,\n 16,\n 8,\n " + "0,\n 24,\n 16,\n 8,\n 0,\n 24,\n " + " 16,\n 8,\n 0,\n 24,\n 16,\n " + "8,\n 0,\n 24,\n 16,\n 8,\n 0,\n " + " 24,\n 16,\n 8,\n 0,\n 24,\n " + "16,\n 8,\n 0,\n 24,\n 16,\n 8,\n " + " 0,\n 24,\n 16,\n 8,\n 0,\n " + "24,\n 16,\n 8,\n 
0,\n 24,\n 16,\n " + " 8,\n 0,\n 24,\n 16,\n 8,\n " + "0,\n 24,\n 16,\n 8,\n 0,\n 24,\n " + " 16,\n 8,\n 0,\n 24,\n 16,\n " + "8,\n 0,\n 24,\n 16,\n 8,\n 0,\n " + " 24,\n 16,\n 8,\n 0,\n 24,\n " + "16,\n 8,\n 0,\n 24,\n 16,\n 8,\n " + " 0,\n 24,\n 16,\n 8,\n 0,\n " + "24,\n 16,\n 8,\n 0,\n 24,\n 16,\n " + " 8,\n 0\n ],\n \"pixels_per_column\": 128,\n " + " \"udp_profile_imu\": \"LEGACY\",\n \"udp_profile_lidar\": " + "\"RNG15_RFL8_NIR8\"\n },\n \"hostname\": \"\",\n " + "\"image_rev\": \"ousteros-image-prod-aries-v2.3.0+20220415163956\",\n " + " \"imu_to_sensor_transform\": \n [\n 1,\n 0,\n " + "0,\n 6.253,\n 0,\n 1,\n 0,\n -11.775,\n " + "0,\n 0,\n 1,\n 7.645,\n 0,\n 0,\n 0,\n " + " 1\n ],\n \"initialization_id\": 7109750,\n " + "\"json_calibration_version\": 4,\n \"lidar_mode\": \"1024x10\",\n " + " \"lidar_origin_to_beam_origin_mm\": 15.806,\n " + "\"lidar_to_sensor_transform\": \n [\n -1,\n 0,\n " + "0,\n 0,\n 0,\n -1,\n 0,\n 0,\n 0,\n " + " 0,\n 1,\n 36.18,\n 0,\n 0,\n 0,\n 1\n " + " ],\n \"prod_line\": \"OS-1-128\",\n \"prod_pn\": " + "\"840-103575-06\",\n \"prod_sn\": \"122201000998\",\n " + "\"proto_rev\": \"\",\n \"status\": \"RUNNING\",\n " + "\"udp_port_imu\": 7503,\n \"udp_port_lidar\": 7502\n }\n}"); EXPECT_EQ(1, reader.meta_store().count()); - EXPECT_EQ(3, std::distance(reader.messages_standard().begin(), - reader.messages_standard().end())); + EXPECT_EQ( + 3, std::distance(reader.messages().begin(), reader.messages().end())); const MetadataStore& meta_store = reader.meta_store(); EXPECT_EQ(3, meta_store.size()); @@ -49,6 +151,38 @@ TEST_F(ReaderTest, ChunksReading) { auto chunks = reader.chunks(); + EXPECT_EQ(chunks.to_string(), "ChunksRange: [ba = 0, ea = 1013976]"); + EXPECT_EQ(chunks.begin().to_string(), "ChunksIter: [ca = 0, ea = 1013976]"); + std::cout << chunks.begin()->end_ts().count() << std::endl; + EXPECT_EQ(chunks.begin()->start_ts(), ts_t(991587364520L)); + EXPECT_EQ(chunks.begin()->end_ts(), ts_t(991787323080L)); + EXPECT_EQ(chunks.begin()->to_string(), + "ChunkRef: [msgs_size = 3, state = (" + "{offset = 0, next_offset = 18446744073709551615," + " start_ts = 991587364520, end_ts = 991787323080," + " status = 1}), chunk_buf_ = nullptr]"); + EXPECT_EQ(((ChunkRef)*chunks.begin())[0].to_string(), + "MessageRef: [id = 2, ts = 991587364520, buffer = 0c" + " 2b 05 00 14 00 00 00 10 00 1c 00 04 00 08 00 0c 00" + " 10 00 14 00 18 00 10 00 00 00 34 38 00 00 24 38 00" + " 00 18 18 00 00 10 10 00 00 08 00 00 00 03 07 00 00" + " 00 04 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01" + " 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00 01 00" + " 00 00 01 00 00 00 01 00 00 00 01 00 00 00 ... 
and" + " 338604 more ...]"); + EXPECT_EQ(chunks.begin()->begin().to_string(), + "MessagesChunkIter: [chunk_ref = ChunkRef:" + " [msgs_size = 3, state = ({offset = 0," + " next_offset = 18446744073709551615," + " start_ts = 991587364520, end_ts = 991787323080," + " status = 1}), chunk_buf_ = nullptr], msg_idx = 0]"); + EXPECT_EQ(to_string(*chunks.begin()->state()), + "{offset = 0, next_offset = 18446744073709551615," + " start_ts = 991587364520, end_ts = 991787323080, status = 1}"); + EXPECT_EQ(to_string(*chunks.begin()->info()), + "{offset = 0, next_offset = 18446744073709551615," + " stream_id = 2, message_count = 3, message_start_idx = 0}"); + EXPECT_EQ(1, std::distance(chunks.begin(), chunks.end())); auto first_chunk_it = chunks.begin(); @@ -82,25 +216,6 @@ TEST_F(ReaderTest, ChunksPileBasics) { EXPECT_EQ(3, cp.size()); } -TEST_F(ReaderTest, MessagesReadingStandard) { - OsfFile osf_file( - path_concat(test_data_dir(), "osfs/OS-1-128_v2.3.0_1024x10_lb_n3.osf")); - - Reader reader(osf_file); - - const auto msgs = reader.messages_standard(); - EXPECT_EQ(3, std::distance(msgs.begin(), msgs.end())); - - // Chunks Iterator - auto chunks = reader.chunks(); - EXPECT_EQ(1, std::distance(chunks.begin(), chunks.end())); - - // Get messages from first chunks - auto first_chunk_it = chunks.begin(); - EXPECT_EQ(3, first_chunk_it->size()); - EXPECT_EQ(3, std::distance(first_chunk_it->begin(), first_chunk_it->end())); -} - TEST_F(ReaderTest, MessagesReadingStreaming) { OsfFile osf_file( path_concat(test_data_dir(), "osfs/OS-1-128_v2.3.0_1024x10_lb_n3.osf")); @@ -111,7 +226,7 @@ TEST_F(ReaderTest, MessagesReadingStreaming) { int it_cnt = 0; ts_t it_prev{0}; bool it_ordered = true; - for (const auto msg : reader.messages_standard()) { + for (const auto msg : reader.messages()) { it_ordered = it_ordered && (it_prev <= msg.ts()); ++it_cnt; it_prev = msg.ts(); @@ -127,6 +242,19 @@ TEST_F(ReaderTest, MessagesReadingStreaming) { int sit_cnt = 0; ts_t sit_prev{0}; bool sit_ordered = true; + EXPECT_EQ((*reader.messages().begin()).to_string(), + "MessageRef: [id = 2, ts = 991587364520, buffer =" + " 0c 2b 05 00 14 00 00 00 10 00 1c 00 04 00 08 00" + " 0c 00 10 00 14 00 18 00 10 00 00 00 34 38 00 00" + " 24 38 00 00 18 18 00 00 10 10 00 00 08 00 00 00" + " 03 07 00 00 00 04 00 00 01 00 00 00 01 00 00 00" + " 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00" + " 01 00 00 00 01 00 00 00 01 00 00 00 01 00 00 00" + " 01 00 00 00 ... 
and 338604 more ...]"); + EXPECT_EQ(reader.messages().begin().to_string(), + "MessagesStreamingIter: [curr_ts = 991587364520," + " end_ts = 991787323081, curr_chunks_.size = 1," + " stream_ids_hash_ = 0, top = (ts = 991587364520, id = 2)]"); for (const auto msg : reader.messages()) { sit_ordered = sit_ordered && (sit_prev <= msg.ts()); @@ -136,6 +264,9 @@ TEST_F(ReaderTest, MessagesReadingStreaming) { EXPECT_EQ(3, sit_cnt); EXPECT_TRUE(sit_ordered); + EXPECT_EQ(reader.messages().to_string(), + "MessagesStreamingRange:" + " [start_ts = 991587364520, end_ts = 991787323080]"); EXPECT_EQ( 3, std::distance(reader.messages().begin(), reader.messages().end())); @@ -166,6 +297,32 @@ TEST_F(ReaderTest, MessagesReadingStreaming) { EXPECT_EQ(3, std::distance(scan_msgs_full.begin(), scan_msgs_full.end())); } +TEST_F(ReaderTest, MetadataFromBufferTest) { + OsfFile osf_file( + path_concat(test_data_dir(), "osfs/OS-1-128_v2.3.0_1024x10_lb_n3.osf")); + + Reader reader(osf_file); + + auto sensor = reader.meta_store().entries().begin()->second; + + std::vector buf; + std::stringstream output_stream; + std::streambuf* old_output_stream = std::cout.rdbuf(); + + std::cout.rdbuf(output_stream.rdbuf()); + auto result = sensor->from_buffer(buf, "NON EXISTENT"); + std::cout.rdbuf(old_output_stream); + EXPECT_EQ(output_stream.str(), "UNKNOWN TYPE: NON EXISTENT\n"); + EXPECT_EQ(result, nullptr); + std::cout.rdbuf(old_output_stream); + + result = sensor->from_buffer(sensor->buffer(), + "ouster/v1/os_sensor/LidarSensor"); + EXPECT_NE(result, nullptr); + EXPECT_EQ(result->id(), 0); + EXPECT_EQ(result->type(), "ouster/v1/os_sensor/LidarSensor"); +} + } // namespace } // namespace osf } // namespace ouster diff --git a/ouster_osf/tests/writer_custom_test.cpp b/ouster_osf/tests/writer_custom_test.cpp index 982c8c7c..e14261a1 100644 --- a/ouster_osf/tests/writer_custom_test.cpp +++ b/ouster_osf/tests/writer_custom_test.cpp @@ -74,13 +74,13 @@ struct yo { class YoStream : public MessageStream { public: YoStream(Writer& writer) : writer_{writer}, meta_{} { - stream_meta_id_ = writer_.addMetadata(meta_); + stream_meta_id_ = writer_.add_metadata(meta_); }; // Boilerplate for writer void save(const ouster::osf::ts_t ts, const obj_type& yo_obj) { const auto& msg_buf = make_msg(yo_obj); - writer_.saveMessage(meta_.id(), ts, msg_buf); + writer_.save_message(meta_.id(), ts, msg_buf); } // Pack yo message into buffer @@ -107,13 +107,13 @@ TEST_F(WriterCustomTest, WriteCustomMsgExample) { std::string output_osf_filename = tmp_file("writer_new_meta_info_msg.osf"); // Create OSF v2 Writer - osf::Writer writer(output_osf_filename, "Yo Example"); + osf::Writer writer(output_osf_filename); // Create LidarSensor record - writer.addMetadata("Happy New Year!"); + writer.add_metadata("Happy New Year!"); // Create stream for `yo` objects - auto yo_stream = writer.createStream(); + auto yo_stream = writer.create_stream(); uint8_t yo_cnt = 0; while (yo_cnt < 100) { @@ -159,4 +159,4 @@ TEST_F(WriterCustomTest, WriteCustomMsgExample) { } } // namespace osf -} // namespace ouster \ No newline at end of file +} // namespace ouster diff --git a/ouster_osf/tests/writer_test.cpp b/ouster_osf/tests/writer_test.cpp index c2a02ac2..80bdd83d 100644 --- a/ouster_osf/tests/writer_test.cpp +++ b/ouster_osf/tests/writer_test.cpp @@ -48,27 +48,22 @@ TEST_F(WriterTest, WriteSingleLidarScan) { std::string output_osf_filename = tmp_file("writer_simple.osf"); - std::string sinfo_str = sinfo.original_string(); + std::string sinfo_str = 
sinfo.updated_metadata_string(); // Writing LidarScan - Writer writer(output_osf_filename, "test_session"); + Writer writer(output_osf_filename); + writer.set_metadata_id("test_session"); EXPECT_EQ(writer.chunks_layout(), ChunksLayout::LAYOUT_STREAMING); - auto sensor_meta_id = writer.addMetadata(sinfo_str); - - EXPECT_THROW({ writer.addMetadata(sinfo); }, - std::invalid_argument); - - auto ls_stream = writer.createStream( - sensor_meta_id, get_field_types(sinfo)); - ls_stream.save(ts_t{123}, ls); + writer.add_sensor(sinfo, get_field_types(sinfo)); + writer.save(0, ls, ts_t{123}); writer.close(); OsfFile osf_file(output_osf_filename); EXPECT_TRUE(osf_file.good()); Reader reader(osf_file); - EXPECT_EQ(reader.id(), "test_session"); + EXPECT_EQ(reader.metadata_id(), "test_session"); auto msg_it = reader.messages().begin(); EXPECT_NE(msg_it, reader.messages().end()); @@ -107,12 +102,12 @@ TEST_F(WriterTest, WriteLidarSensorWithExtrinsics) { sinfo.extrinsic(0, 0) = 0.0; // Writing LidarSensor - Writer writer(output_osf_filename, "test_session"); + Writer writer(output_osf_filename); - auto sensor_meta_id = writer.addMetadata(sinfo_str); + auto sensor_meta_id = writer.add_metadata(sinfo_str); EXPECT_TRUE(sensor_meta_id != 0); - writer.addMetadata(sinfo.extrinsic, sensor_meta_id); + writer.add_metadata(sinfo.extrinsic, sensor_meta_id); writer.close(); @@ -132,6 +127,9 @@ TEST_F(WriterTest, WriteLidarSensorWithExtrinsics) { auto extrinsics = reader.meta_store().find(); EXPECT_EQ(extrinsics.size(), 1); + EXPECT_EQ(extrinsics.begin()->second->repr(), + "ExtrinsicsMeta: ref_id = 1, name = , extrinsics = 0 0.756 0 10 " + "0.756 1 0 0 0 0 1 0 0 0 0 1"); auto ext_mat_recovered = extrinsics.begin()->second->extrinsics(); EXPECT_EQ(sinfo.extrinsic, ext_mat_recovered); @@ -145,23 +143,22 @@ TEST_F(WriterTest, WriteSingleLidarScanStreamingLayout) { std::string output_osf_filename = tmp_file("writer_simple_streaming.osf"); - std::string sinfo_str = sinfo.original_string(); + std::string sinfo_str = sinfo.updated_metadata_string(); // Writing LidarScan - Writer writer(output_osf_filename, "test_session"); + Writer writer(output_osf_filename); + writer.set_metadata_id("test_session"); EXPECT_EQ(writer.chunks_layout(), ChunksLayout::LAYOUT_STREAMING); - auto sensor_meta_id = writer.addMetadata(sinfo_str); - auto ls_stream = writer.createStream( - sensor_meta_id, get_field_types(sinfo)); - ls_stream.save(ts_t{123}, ls); + writer.add_sensor(sinfo, get_field_types(sinfo)); + writer.save(0, ls, ts_t{123}); writer.close(); OsfFile osf_file(output_osf_filename); EXPECT_TRUE(osf_file.good()); Reader reader(osf_file); - EXPECT_EQ(reader.id(), "test_session"); + EXPECT_EQ(reader.metadata_id(), "test_session"); // TODO[pb]: Add reader validation CRC @@ -215,21 +212,19 @@ TEST_F(WriterTest, WriteSlicedLidarScan) { std::string output_osf_filename = tmp_file("writer_sliced.osf"); - std::string sinfo_str = sinfo.original_string(); + std::string sinfo_str = sinfo.updated_metadata_string(); // Writing LidarScan - Writer writer(output_osf_filename, "test_session"); - auto sensor_meta_id = writer.addMetadata(sinfo_str); - auto ls_stream = - writer.createStream(sensor_meta_id, field_types); - ls_stream.save(ts_t{123}, ls); + Writer writer(output_osf_filename, sinfo, field_types); + writer.set_metadata_id("test_session"); + writer.save(0, ls, ts_t{123}); writer.close(); OsfFile osf_file(output_osf_filename); EXPECT_TRUE(osf_file.good()); Reader reader(osf_file); - EXPECT_EQ(reader.id(), "test_session"); + 
EXPECT_EQ(reader.metadata_id(), "test_session"); auto msg_it = reader.messages().begin(); EXPECT_NE(msg_it, reader.messages().end()); @@ -261,7 +256,7 @@ TEST_F(WriterTest, WriteSlicedLegacyLidarScan) { test_data_dir(), "metadata/2_5_0_os-992146000760-128_legacy.json")); LidarScan ls_orig = get_random_lidar_scan(sinfo); - // Subset of fields to leave in LidarScan (or extend ... ) during writing + // Subset of fields to leave in LidarScan during writing LidarScanFieldTypes field_types; field_types.emplace_back(sensor::ChanField::RANGE, sensor::ChanFieldType::UINT32); @@ -269,8 +264,6 @@ TEST_F(WriterTest, WriteSlicedLegacyLidarScan) { sensor::ChanFieldType::UINT16); field_types.emplace_back(sensor::ChanField::REFLECTIVITY, sensor::ChanFieldType::UINT8); - field_types.emplace_back(sensor::ChanField::REFLECTIVITY2, - sensor::ChanFieldType::UINT16); std::cout << "LidarScan field_types: " << ouster::to_string(field_types) << std::endl; @@ -279,35 +272,25 @@ TEST_F(WriterTest, WriteSlicedLegacyLidarScan) { // that will be compared with a recovered LidarScan from OSF auto ls_reference = slice_with_cast(ls_orig, field_types); - // Check that we have non existent REFLECTIVITY2 set as Zero - img_t refl2{ls_reference.h, ls_reference.w}; - impl::visit_field(ls_reference, sensor::ChanField::REFLECTIVITY2, - ouster::impl::read_and_cast(), refl2); - EXPECT_TRUE((refl2 == 0).all()); - EXPECT_EQ(field_types.size(), std::distance(ls_reference.begin(), ls_reference.end())); std::string output_osf_filename = tmp_file("writer_sliced_legacy.osf"); - std::string sinfo_str = sinfo.original_string(); + std::string sinfo_str = sinfo.updated_metadata_string(); - // Writing LidarScan - Writer writer(output_osf_filename, "test_session"); - auto sensor_meta_id = writer.addMetadata(sinfo_str); - - // Creating LidarScanStream with custom field_types, that will be used to - // transform LidarScan during save() - auto ls_stream = - writer.createStream(sensor_meta_id, field_types); - ls_stream.save(ts_t{123}, ls_orig); + // Writing LidarScan with custom field types + Writer writer(output_osf_filename, sinfo, field_types); + writer.set_metadata_id("test_session"); + + writer.save(0, ls_orig, ts_t{123}); writer.close(); OsfFile osf_file(output_osf_filename); EXPECT_TRUE(osf_file.good()); Reader reader(osf_file); - EXPECT_EQ(reader.id(), "test_session"); + EXPECT_EQ(reader.metadata_id(), "test_session"); auto msg_it = reader.messages().begin(); EXPECT_NE(msg_it, reader.messages().end()); @@ -392,19 +375,16 @@ TEST_F(WriterTest, WriteCustomLidarScanWithFlags) { std::string sinfo_str = sinfo.original_string(); // Writing LidarScan - Writer writer(output_osf_filename, "test_session"); - auto sensor_meta_id = writer.addMetadata(sinfo_str); - - auto ls_stream = writer.createStream(sensor_meta_id, - get_field_types(ls)); - ls_stream.save(ts_t{123}, ls); + Writer writer(output_osf_filename, sinfo, get_field_types(ls)); + writer.set_metadata_id("test_session"); + writer.save(0, ls, ts_t{123}); writer.close(); OsfFile osf_file(output_osf_filename); EXPECT_TRUE(osf_file.good()); Reader reader(osf_file); - EXPECT_EQ(reader.id(), "test_session"); + EXPECT_EQ(reader.metadata_id(), "test_session"); auto msg_it = reader.messages().begin(); EXPECT_NE(msg_it, reader.messages().end()); @@ -456,20 +436,16 @@ TEST_F(WriterTest, WriteExample) { // Get sensor_info const sensor_info sinfo = sensor::metadata_from_json( path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); - std::string sensor_metadata = sinfo.original_string(); 
std::string output_osf_filename = tmp_file("write_example.osf"); // Create OSF v2 Writer - osf::Writer writer(output_osf_filename, "Example Session 1234"); + osf::Writer writer(output_osf_filename); + writer.set_metadata_id("Example Session 1234"); EXPECT_EQ(writer.chunks_layout(), ChunksLayout::LAYOUT_STREAMING); // Create LidarSensor record - auto sensor_meta_id = writer.addMetadata(sensor_metadata); - - // Create stream for LidarScan objects - auto ls_stream = writer.createStream( - sensor_meta_id, get_field_types(sinfo)); + auto sensor_id = writer.add_sensor(sinfo); const int LOOP_CNT = 7; @@ -478,7 +454,7 @@ TEST_F(WriterTest, WriteExample) { LidarScan ls = get_random_lidar_scan(sinfo); // Save LidarScan - ls_stream.save(ts_t{timestamp}, ls); + writer.save(sensor_id, ls, ts_t{timestamp}); } writer.close(); @@ -543,6 +519,12 @@ TEST_F(WriterTest, WriteExample) { ReadExample(output_osf_filename); } +TEST_F(WriterTest, FileNameOnlyWriterTest) { + osf::Writer writer("FOOBARBAT"); + EXPECT_EQ(writer.filename(), "FOOBARBAT"); + EXPECT_EQ(writer.metadata_id(), "ouster_sdk"); +} + } // namespace } // namespace osf } // namespace ouster diff --git a/ouster_osf/tests/writerv2_test.cpp b/ouster_osf/tests/writerv2_test.cpp new file mode 100644 index 00000000..ee62a89e --- /dev/null +++ b/ouster_osf/tests/writerv2_test.cpp @@ -0,0 +1,203 @@ +/** + * Copyright(c) 2021, Ouster, Inc. + * All rights reserved. + */ + +#include + +#include + +#include "common.h" +#include "osf_test.h" +#include "ouster/lidar_scan.h" +#include "ouster/osf/file.h" +#include "ouster/osf/meta_extrinsics.h" +#include "ouster/osf/meta_lidar_sensor.h" +#include "ouster/osf/meta_streaming_info.h" +#include "ouster/osf/reader.h" +#include "ouster/osf/stream_lidar_scan.h" +#include "ouster/osf/writer.h" +#include "ouster/types.h" + +namespace ouster { +namespace osf { +namespace { + +using ouster::osf::get_random_lidar_scan; +using ouster::sensor::sensor_info; + +class WriterV2Test : public osf::OsfTestWithDataAndFiles {}; + +TEST_F(WriterV2Test, WriterV2AccessorTest) { + const int chunk_size = 1234; + std::string output_osf_filename = tmp_file("WriterV2AccessorTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + const sensor::sensor_info info2 = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-0-128-U1_v2.3.0_1024x10.json")); + { + std::vector info_compare = {info}; + Writer writer(output_osf_filename, info, LidarScanFieldTypes(), + chunk_size); + EXPECT_EQ(writer.chunk_size(), chunk_size); + EXPECT_EQ(writer.sensor_info_count(), 1); + EXPECT_EQ(writer.filename(), output_osf_filename); + EXPECT_EQ(writer.sensor_info(), info_compare); + EXPECT_EQ(writer.sensor_info(0), info); + } + { + std::vector info_compare = {info, info2}; + Writer writer(output_osf_filename, info_compare, LidarScanFieldTypes(), + chunk_size); + EXPECT_EQ(writer.sensor_info_count(), 2); + + EXPECT_EQ(writer.sensor_info(), info_compare); + EXPECT_EQ(writer.sensor_info(0), info); + EXPECT_EQ(writer.sensor_info(1), info2); + } +} + +TEST_F(WriterV2Test, WriterV2BoundingTest) { + const int chunk_size = 1234; + std::string output_osf_filename = tmp_file("WriterV2BoundingTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + Writer writer(output_osf_filename, info, LidarScanFieldTypes(), chunk_size); + + bool caught = false; + try { + LidarScan one; + 
writer.save(1, one); + } catch (const std::logic_error& e) { + EXPECT_EQ(std::string(e.what()), "ERROR: Bad Stream ID"); + caught = true; + } catch (...) { + FAIL(); + } + EXPECT_TRUE(caught); + caught = false; + try { + LidarScan one; + LidarScan two; + writer.save({one, two}); + } catch (const std::logic_error& e) { + EXPECT_EQ(std::string(e.what()), + "ERROR: Scans passed in to writer " + "does not match number of sensor infos"); + caught = true; + } catch (...) { + FAIL(); + } + EXPECT_TRUE(caught); +} + +TEST_F(WriterV2Test, WriterV2CloseTest) { + std::string output_osf_filename = tmp_file("WriterV2CloseTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + LidarScan ls = get_random_lidar_scan(info); + Writer writer(output_osf_filename, info); + writer.save(0, ls); + EXPECT_FALSE(writer.is_closed()); + writer.close(); + EXPECT_TRUE(writer.is_closed()); + bool caught = false; + try { + writer.save({ls}); + } catch (const std::logic_error& e) { + EXPECT_EQ(std::string(e.what()), "ERROR: Writer is closed"); + caught = true; + } catch (...) { + FAIL(); + } + EXPECT_TRUE(caught); +} + +void test_single_file(std::string& output_osf_filename, LidarScan& ls) { + Reader reader(output_osf_filename); + + auto msg_it = reader.messages().begin(); + EXPECT_NE(msg_it, reader.messages().end()); + + auto ls_recovered = msg_it->decode_msg(); + + EXPECT_TRUE(ls_recovered); + EXPECT_EQ(*ls_recovered, ls); + + EXPECT_EQ(++msg_it, reader.messages().end()); +} + +TEST_F(WriterV2Test, WriterV2SingleIndexedTest) { + std::string output_osf_filename = tmp_file("WriterV2SingleIndexedTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + LidarScan ls = get_random_lidar_scan(info); + { + Writer writer(output_osf_filename, info); + writer.save(0, ls); + } + test_single_file(output_osf_filename, ls); +} + +TEST_F(WriterV2Test, WriterV2SingleVectorTest) { + std::string output_osf_filename = tmp_file("WriterV2SingleVectorTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + LidarScan ls = get_random_lidar_scan(info); + { + Writer writer(output_osf_filename, info); + writer.save({ls}); + } + test_single_file(output_osf_filename, ls); +} + +void test_multi_file(std::string& output_osf_filename, LidarScan& ls, + LidarScan& ls2) { + Reader reader(output_osf_filename); + auto msg_it = reader.messages().begin(); + EXPECT_NE(msg_it, reader.messages().end()); + auto ls_recovered = msg_it->decode_msg(); + EXPECT_TRUE(ls_recovered); + EXPECT_EQ(*ls_recovered, ls); + EXPECT_NE(++msg_it, reader.messages().end()); + auto ls_recovered2 = msg_it->decode_msg(); + EXPECT_TRUE(ls_recovered2); + EXPECT_EQ(*ls_recovered2, ls2); + EXPECT_EQ(++msg_it, reader.messages().end()); +} + +TEST_F(WriterV2Test, WriterV2MultiIndexedTest) { + std::string output_osf_filename = tmp_file("WriterV2MultiIndexedTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + const sensor::sensor_info info2 = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-0-128-U1_v2.3.0_1024x10.json")); + + LidarScan ls = get_random_lidar_scan(info); + LidarScan ls2 = get_random_lidar_scan(info2); + { + Writer writer(output_osf_filename, {info, info2}); + writer.save(0, ls); + writer.save(1, 
ls2); + } + test_multi_file(output_osf_filename, ls2, ls); +} + +TEST_F(WriterV2Test, WriterV2MultiVectorTest) { + std::string output_osf_filename = tmp_file("WriterV2MultiVectorTest.osf"); + const sensor::sensor_info info = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-1-128_v2.3.0_1024x10.json")); + const sensor::sensor_info info2 = sensor::metadata_from_json( + path_concat(test_data_dir(), "pcaps/OS-0-128-U1_v2.3.0_1024x10.json")); + + LidarScan ls = get_random_lidar_scan(info); + LidarScan ls2 = get_random_lidar_scan(info2); + { + Writer writer(output_osf_filename, {info, info2}); + writer.save({ls, ls2}); + } + test_multi_file(output_osf_filename, ls2, ls); +} +} // namespace +} // namespace osf +} // namespace ouster diff --git a/ouster_pcap/CMakeLists.txt b/ouster_pcap/CMakeLists.txt index a78e57fb..611d55f0 100644 --- a/ouster_pcap/CMakeLists.txt +++ b/ouster_pcap/CMakeLists.txt @@ -2,6 +2,8 @@ find_package(Pcap REQUIRED) find_package(libtins REQUIRED) +include(Coverage) + # ==== Libraries ==== add_library(ouster_pcap src/pcap.cpp src/os_pcap.cpp src/indexed_pcap_reader.cpp) target_include_directories(ouster_pcap SYSTEM PRIVATE @@ -9,6 +11,7 @@ target_include_directories(ouster_pcap PUBLIC $ $) +CodeCoverageFunctionality(ouster_pcap) if(WIN32) target_compile_options(ouster_pcap PRIVATE /wd4200) diff --git a/ouster_pcap/include/ouster/indexed_pcap_reader.h b/ouster_pcap/include/ouster/indexed_pcap_reader.h index 968aaaf4..8fa5137e 100644 --- a/ouster_pcap/include/ouster/indexed_pcap_reader.h +++ b/ouster_pcap/include/ouster/indexed_pcap_reader.h @@ -14,15 +14,36 @@ namespace sensor_utils { struct PcapIndex { using frame_index = std::vector; ///< Maps a frame number to a file offset + std::vector frame_indices_; ///< frame index for each sensor - PcapIndex(size_t num_sensors) : frame_indices_(num_sensors) {} + using timestamp_index = std::unordered_map; + + // TODO: this isn't used for now but in the future might be used to + // solve the issue with the limited frame_id span (we could remove it) + std::vector frame_timestamp_indices_; + + using frame_id_index = std::unordered_map; + + // TODO[IMPORTANT]: this has an issue if the recorded pcap file spans + // over 50 mins. + std::vector frame_id_indices_; + + PcapIndex(size_t num_sensors) + : frame_indices_(num_sensors), + frame_timestamp_indices_(num_sensors), + frame_id_indices_(num_sensors) {} + + /** + * Simple method to clear the index. + */ + void clear(); /** * Returns the number of frames in the frame index for the given sensor * index. * - * @param sensor_index[in] The position of the sensor for which to retrieve + * @param[in] sensor_index The position of the sensor for which to retrieve * the desired frame count. * @return The number of frames in the sensor's frame index. */ @@ -32,6 +53,12 @@ struct PcapIndex { * Seeks the given reader to the given frame number for the given sensor * index */ + // TODO[UN]: in my opinion we are better off removing this method from this + // class; it is better if we keep this class as a simple POD object. Another + // problem with this method specifically is that it creates a cyclic + // dependency; this is the reason why we are passing PcapReader instead of + // IndexedPcapReader, to avoid this cyclic relation. If it amounts to + // anything, this method should be part of the IndexedPcapReader. 
void seek_to_frame(PcapReader& reader, size_t sensor_index, unsigned int frame_number); }; @@ -44,12 +71,32 @@ struct PcapIndex { */ struct IndexedPcapReader : public PcapReader { /** - * @param pcap_filename[in] A file path of the pcap to read - * @param metadata_filenames[in] A vector of sensor metadata file paths + * @param[in] pcap_filename A file path of the pcap to read + * @param[in] metadata_filenames A vector of sensor metadata file paths */ IndexedPcapReader(const std::string& pcap_filename, const std::vector& metadata_filenames); + /** + * @param[in] pcap_filename A file path of the pcap to read + * @param[in] sensor_infos A vector of sensor info structures for each + * sensor + */ + IndexedPcapReader( + const std::string& pcap_filename, + const std::vector& sensor_infos); + + /** + * This method constructs the index. Call this method before requesting the + * index information using get_index(). + */ + void build_index(); + + /** + * Get the index for the underlying pcap. + * + * @return The PcapIndex object + */ const PcapIndex& get_index() const; /** @@ -71,6 +118,9 @@ struct IndexedPcapReader : public PcapReader { * Updates the frame index for the current packet * @return the progress of indexing as an int from [0, 100] */ + // TODO: I recommend making this a private method; the problem with exposing + // this method is that it only yields correct results if invoked sequentially; + // the results are dependent on the internal state. int update_index_for_current_packet(); /** @@ -86,6 +136,8 @@ struct IndexedPcapReader : public PcapReader { sensor_infos_; ///< A vector of sensor_info that correspond to the ///< provided metadata files PcapIndex index_; + + // TODO: remove, this should be a transient variable std::vector> previous_frame_ids_; ///< previous frame id for each sensor }; diff --git a/ouster_pcap/include/ouster/os_pcap.h b/ouster_pcap/include/ouster/os_pcap.h index 360af3bd..38d2e641 100644 --- a/ouster_pcap/include/ouster/os_pcap.h +++ b/ouster_pcap/include/ouster/os_pcap.h @@ -177,7 +177,7 @@ bool next_packet_info(playback_handle& handle, packet_info& info); * * @param[in] handle The playback handle. * @param[out] buf The buffer to write the recieved data to (Must be sized - * appropriately. + * appropriately. * @param[in] buffer_size The size of the output buffer. * * @return 0 on no new packet, > 0 the size of the bytes recieved. @@ -213,7 +213,7 @@ void record_uninitialize(record_handle& handle); * @param[in] buf The buffer to record to the pcap file. * @param[in] buffer_size The size of the buffer to record to the pcap file. * @param[in] microsecond_timestamp The timestamp to record the packet as - * microseconds. + * microseconds. */ void record_packet(record_handle& handle, const std::string& src_ip, const std::string& dst_ip, int src_port, int dst_port, @@ -236,7 +236,7 @@ void record_packet(record_handle& handle, const packet_info& info, * * @param[in] file The pcap file to read. * @param[in] packets_to_process Number of packets to process < 0 for all of - * them + * them * * @return A pointer to the resulting stream_info */ @@ -248,11 +248,13 @@ std::shared_ptr get_stream_info(const std::string& file, * * @param[in] file The pcap file to read. 
* @param[in] progress_callback A callback to invoke after each packet is - * scanned current: The current file offset delta: The delta in file offset - * total: The total size of the file + * scanned + * current: The current file offset + * delta: The delta in file offset + * total: The total size of the file * @param[in] packets_per_callback Callback every n packets * @param[in] packets_to_process Number of packets to process < 0 for all of - * them + * them * * @return A pointer to the resulting stream_info */ @@ -267,14 +269,14 @@ std::shared_ptr get_stream_info( * indicies (if the PcapReader is an IndexedPcapReader). * * @param[in] pcap_reader The PcapReader - * @param[in] sensor_info a set of sensor_info used to parse packets contained - * in the file * @param[in] progress_callback A callback to invoke after each packet is - * scanned current: The current file offset delta: The delta in file offset - * total: The total size of the file + * scanned + * current: The current file offset + * delta: The delta in file offset + * total: The total size of the file * @param[in] packets_per_callback Callback every n packets * @param[in] packets_to_process Number of packets to process < 0 for all of - * them + * them * * @return A pointer to the resulting stream_info */ @@ -286,12 +288,12 @@ std::shared_ptr get_stream_info( * Return a guess of the correct ports located in a pcap file. * * @param[in] info The stream_info structure generated from a specific pcap file - * @param[in] lidar_packet_sizes The size of the lidar packets - * @param[in] imu_packet_sizes The size of the imu packets - * @param[in] lidar_spec The expected lidar port from the metadata(pass 0 for - * unknown) - * @param[in] imu_spec The expected imu port from the metadata(pass 0 for - * unknown) + * @param[in] lidar_packet_size The size of the lidar packets + * @param[in] imu_packet_size The size of the imu packets + * @param[in] expected_lidar_port The expected lidar port from the metadata + * (pass 0 for unknown) + * @param[in] expected_imu_port The expected imu port from the metadata + * (pass 0 for unknown) * * @return A vector (sorted by most likely to least likely) of the guessed ports */ diff --git a/ouster_pcap/include/ouster/pcap.h b/ouster_pcap/include/ouster/pcap.h index a7c71208..f959c816 100644 --- a/ouster_pcap/include/ouster/pcap.h +++ b/ouster_pcap/include/ouster/pcap.h @@ -50,7 +50,7 @@ class PcapReader { public: /** - * @param file[in] A filepath of the pcap to read + * @param[in] file A filepath of the pcap to read */ PcapReader(const std::string& file); virtual ~PcapReader(); @@ -103,13 +103,13 @@ class PcapReader { * Seek to the position in the file represented by the * number of bytes from the beginning of the file. * - * @param offset[in] The position to seek to in bytes, + * @param[in] offset The position to seek to in bytes, * starting from the beginning of the file. * - * @pre \paramname{offset} must be the offset of a PCAP + * @pre offset must be the offset of a PCAP * record header. If any other value is provided, * subsequent packet reads from this PcapReader will be - * invalid until \functionname{reset} is called. + * invalid until PcapReader::reset is called. 
*/ void seek(uint64_t offset); @@ -135,9 +135,9 @@ class PcapWriter { }; /** - * @param file[in] The file path to write the pcap to - * @param encap[in] The encapsulation to use for the pcap - * @param frag_size[in] The fragmentation size to use (Currently broken) + * @param[in] file The file path to write the pcap to + * @param[in] encap The encapsulation to use for the pcap + * @param[in] frag_size The fragmentation size to use (Currently broken) */ PcapWriter(const std::string& file, PacketEncapsulation encap, uint16_t frag_size); @@ -146,13 +146,13 @@ class PcapWriter { /** * Write a packet using a buffer to the pcap * - * @param buf[in] The buffer to write - * @param buf_size[in] The size of the buffer to write - * @param src_ip[in] The source ip address to use for the packet - * @param dst_ip[in] The destination ip address to use for the packet - * @param src_port[in] The source port number to use for the packet - * @param dst_port[in] The destination port number to use for the packet - * @param timestamp[in] The timestamp of the packet to record + * @param[in] buf The buffer to write + * @param[in] buf_size The size of the buffer to write + * @param[in] src_ip The source ip address to use for the packet + * @param[in] dst_ip The destination ip address to use for the packet + * @param[in] src_port The source port number to use for the packet + * @param[in] dst_port The destination port number to use for the packet + * @param[in] timestamp The timestamp of the packet to record * @note The timestamp parameter does not affect the order of packets being * recorded, it is strictly recorded FIFO. */ @@ -164,9 +164,9 @@ class PcapWriter { /** * Write a packet using a buffer to the pcap * - * @param buf[in] The buffer to write - * @param buf_size[in] The size of the buffer to write - * @param info[in] The packet info object to use for the recording + * @param[in] buf The buffer to write + * @param[in] buf_size The size of the buffer to write + * @param[in] info The packet info object to use for the recording * parameters * @note The timestamp parameter in info does not affect the order of * packets being recorded, it is strictly recorded FIFO. 
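Editor's note, not part of the patch: the indexed_pcap_reader.h changes above introduce an explicit indexing step (build_index() followed by get_index()) plus PcapIndex::seek_to_frame() for random access, and the hunk below adds the implementation. The following is a minimal usage sketch only; the element type of the sensor_infos vector is assumed to be ouster::sensor::sensor_info, and the file paths are placeholders.

// Sketch: build a frame index for a recorded pcap, then jump to a frame.
#include <vector>

#include "ouster/indexed_pcap_reader.h"
#include "ouster/types.h"

using namespace ouster::sensor_utils;

void replay_from_frame() {
    // Metadata for the (single) sensor captured in the pcap (placeholder paths).
    auto info = ouster::sensor::metadata_from_json("OS-1-128.json");
    IndexedPcapReader reader("recording.pcap", {info});

    // Build the per-sensor frame index up front, then query it.
    reader.build_index();
    const PcapIndex& index = reader.get_index();

    // Seek to the start of frame 10 for sensor 0 and read packets from there.
    index.seek_to_frame(reader, 0, 10);
    while (reader.next_packet() != 0) {
        const auto& pkt = reader.current_info();
        // ... hand the packet payload off to parsing / batching code ...
        (void)pkt;
    }
}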
diff --git a/ouster_pcap/src/indexed_pcap_reader.cpp b/ouster_pcap/src/indexed_pcap_reader.cpp index ca8f79b2..960cd748 100644 --- a/ouster_pcap/src/indexed_pcap_reader.cpp +++ b/ouster_pcap/src/indexed_pcap_reader.cpp @@ -15,6 +15,14 @@ IndexedPcapReader::IndexedPcapReader( } } +IndexedPcapReader::IndexedPcapReader( + const std::string& pcap_filename, + const std::vector& sensor_infos) + : PcapReader(pcap_filename), + sensor_infos_(sensor_infos), + index_(sensor_infos.size()), + previous_frame_ids_(sensor_infos.size()) {} + nonstd::optional IndexedPcapReader::sensor_idx_for_current_packet() const { const auto& pkt_info = current_info(); @@ -49,22 +57,42 @@ int IndexedPcapReader::update_index_for_current_packet() { sensor_idx_for_current_packet()) { if (nonstd::optional frame_id = current_frame_id()) { if (!previous_frame_ids_[*sensor_info_idx] || - *previous_frame_ids_[*sensor_info_idx] < - *frame_id // frame_id is greater than previous - || frame_id_rolled_over(*previous_frame_ids_[*sensor_info_idx], - *frame_id)) { + *previous_frame_ids_[*sensor_info_idx] < *frame_id || + frame_id_rolled_over(*previous_frame_ids_[*sensor_info_idx], + *frame_id)) { index_.frame_indices_[*sensor_info_idx].push_back( current_info().file_offset); + index_.frame_timestamp_indices_[*sensor_info_idx].insert( + {current_info().timestamp.count(), + current_info().file_offset}); + index_.frame_id_indices_[*sensor_info_idx].insert( + {*frame_id, current_info().file_offset}); previous_frame_ids_[*sensor_info_idx] = *frame_id; } } } + return static_cast(100 * static_cast(current_offset()) / file_size()); } +void IndexedPcapReader::build_index() { + index_.clear(); + reset(); + while (next_packet() != 0) update_index_for_current_packet(); + reset(); +} + const PcapIndex& IndexedPcapReader::get_index() const { return index_; } +void PcapIndex::clear() { + for (size_t i = 0; i < frame_indices_.size(); ++i) { + frame_indices_[i].clear(); + frame_timestamp_indices_[i].clear(); + frame_id_indices_[i].clear(); + } +} + void PcapIndex::seek_to_frame(PcapReader& reader, size_t sensor_index, unsigned int frame_number) { reader.seek(frame_indices_.at(sensor_index).at(frame_number)); diff --git a/ouster_pcap/src/os_pcap.cpp b/ouster_pcap/src/os_pcap.cpp index d8c0add3..e090079d 100644 --- a/ouster_pcap/src/os_pcap.cpp +++ b/ouster_pcap/src/os_pcap.cpp @@ -22,6 +22,9 @@ namespace ouster { namespace sensor_utils { +// TODO: IndexedPcapReader bypasses playback_handle and record_handle, either +// use the same fencing mechansim or switch to use OOP style + struct record_handle { record_handle(const std::string& path, PcapWriter::PacketEncapsulation encap, uint16_t frag_size) diff --git a/ouster_pcap/src/pcap.cpp b/ouster_pcap/src/pcap.cpp index 38b8950a..ffcf2477 100644 --- a/ouster_pcap/src/pcap.cpp +++ b/ouster_pcap/src/pcap.cpp @@ -2,11 +2,11 @@ * Copyright (c) 2022, Ouster, Inc. * All rights reserved. 
* - * @TODO check that the header casting is idiomatic libpcap - * @TODO warn on dropped packets when pcap contains garbage, when fragments + * @todo check that the header casting is idiomatic libpcap + * @todo warn on dropped packets when pcap contains garbage, when fragments * missing, buffer reused before sending - * @TODO split up reading / playback - * @TODO improve error reporting + * @todo split up reading / playback + * @todo improve error reporting */ #define _FILE_OFFSET_BITS 64 diff --git a/ouster_viz/CMakeLists.txt b/ouster_viz/CMakeLists.txt index f7380ad3..4f9eff5a 100644 --- a/ouster_viz/CMakeLists.txt +++ b/ouster_viz/CMakeLists.txt @@ -1,6 +1,7 @@ # ==== Requirements ==== set(OpenGL_GL_PREFERENCE LEGACY) find_package(OpenGL REQUIRED) +include(Coverage) # default to glad, if found. Note: this can be overridden from the command line find_package(glad QUIET) @@ -27,6 +28,7 @@ add_library(ouster_viz src/point_viz.cpp src/cloud.cpp src/camera.cpp src/image. src/gltext.cpp src/misc.cpp src/glfw.cpp) target_link_libraries(ouster_viz PRIVATE Eigen3::Eigen glfw ${GL_LOADER} OpenGL::GL ouster_client) +CodeCoverageFunctionality(ouster_viz) target_include_directories(ouster_viz PUBLIC $ diff --git a/ouster_viz/include/ouster/point_viz.h b/ouster_viz/include/ouster/point_viz.h index 089e9cf5..320db0dc 100644 --- a/ouster_viz/include/ouster/point_viz.h +++ b/ouster_viz/include/ouster/point_viz.h @@ -127,6 +127,20 @@ class PointViz { */ void visible(bool state); + /** + * Check if viz update_on_input state + * + * @return true if the viz will update on input + */ + bool update_on_input(); + + /** + * Set viz update_on_input flag. + * + * @param[in] state new value of the flag + */ + void update_on_input(bool state); + /** * Update visualization state * @@ -685,8 +699,8 @@ class Cloud { /** * Set the key alpha values, leaving the color the same. * - * @param[in] key pointer to array of at least as many elements as there are - * points, normalized between 0 and 1 + * @param[in] key_alpha pointer to array of at least as many elements as + * there are points, normalized between 0 and 1 */ void set_key_alpha(const float* key_alpha); @@ -701,7 +715,7 @@ class Cloud { /** * Set the key values in RGBA format, used for coloring. * - * @param[in] key_rgb pointer to array of at least 4x as many elements as + * @param[in] key_rgba pointer to array of at least 4x as many elements as * there are points, normalized between 0 and 1 */ void set_key_rgba(const float* key_rgba); @@ -830,7 +844,7 @@ class Image { * * @param[in] width width of the image data in pixels * @param[in] height height of the image data in pixels - * @param[in] image_data pointer to an array of width * height elements + * @param[in] image_data_rgb pointer to an array of width * height elements * interpreted as a row-major RGB image */ void set_image_rgb(size_t width, size_t height, @@ -841,7 +855,7 @@ class Image { * * @param[in] width width of the image data in pixels * @param[in] height height of the image data in pixels - * @param[in] image_data pointer to an array of width * height elements + * @param[in] image_data_rgba pointer to an array of width * height elements * interpreted as a row-major RGBA image */ void set_image_rgba(size_t width, size_t height, @@ -946,7 +960,7 @@ class Cuboid { /** * Set the color of the cuboid. 
* - * @param rgba @todo document me + * @param[in] rgba @todo document me */ void set_rgba(const vec4f& rgba); @@ -1046,6 +1060,15 @@ extern const size_t spezia_n; */ extern const float spezia_palette[][3]; +/** + * Spezia Cal Ref palette size in number of colors. + */ +extern const size_t spezia_cal_ref_n; +/** + * Spezia Cal Ref palette, RGB values per element. + */ +extern const float spezia_cal_ref_palette[][3]; + /** * Calibrated reflectifiy palette size in number of colors. */ @@ -1064,6 +1087,15 @@ extern const size_t grey_n; */ extern const float grey_palette[][3]; +/** + * Greyscale Cal Ref palette size in number of colors. + */ +extern const size_t grey_cal_ref_n; +/** + * Greyscale Cal Ref palette, RGB values per element. + */ +extern const float grey_cal_ref_palette[][3]; + /** * Viridis palette size in number of colors. */ @@ -1073,6 +1105,15 @@ extern const size_t viridis_n; */ extern const float viridis_palette[][3]; +/** + * Viridis Cal Ref palette size in number of colors. + */ +extern const size_t viridis_cal_ref_n; +/** + * Viridis Cal Ref palette, RGB values per element. + */ +extern const float viridis_cal_ref_palette[][3]; + /** * Magma palette size in number of colors. */ @@ -1082,5 +1123,14 @@ extern const size_t magma_n; */ extern const float magma_palette[][3]; +/** + * Magma Cal Ref palette size in number of colors. + */ +extern const size_t magma_cal_ref_n; +/** + * Magma Cal Ref palette, RGB values per element. + */ +extern const float magma_cal_ref_palette[][3]; + } // namespace viz } // namespace ouster diff --git a/ouster_viz/src/cloud.cpp b/ouster_viz/src/cloud.cpp index 230b6ac0..9cdb1d1f 100644 --- a/ouster_viz/src/cloud.cpp +++ b/ouster_viz/src/cloud.cpp @@ -28,8 +28,9 @@ struct CloudIds { /** * constructor - * @param point_program_id handle to GLSL shader program compiled from - * point_vertex_shader_code and point_fragment_shader_code + * @param[in] point_program_id handle to GLSL shader program compiled from + * point_vertex_shader_code and + * point_fragment_shader_code */ explicit CloudIds(GLuint point_program_id) : xyz_id(glGetAttribLocation(point_program_id, "xyz")), @@ -80,8 +81,8 @@ GLCloud::~GLCloud() { /** * @brief Makes a key from the pair of (n, w) for use in maps. 
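The new *_cal_ref palettes declared above are plain RGB lookup tables. The sketch below, which is not part of the patch, shows one illustrative way to map a normalized key in [0, 1] to a palette entry; it uses only the extern declarations from point_viz.h, and the clamp-and-round mapping is an assumption for illustration rather than the viz's own shader-side behavior.

// Sketch: look up a color in one of the new calibrated-reflectivity palettes.
#include <algorithm>
#include <cstddef>
#include <cstdio>

#include "ouster/point_viz.h"

// Map key in [0, 1] to the nearest palette entry (illustrative mapping only).
static const float* palette_lookup(const float palette[][3], size_t n,
                                   float key) {
    key = std::min(std::max(key, 0.0f), 1.0f);               // clamp to [0, 1]
    size_t idx = static_cast<size_t>(key * (n - 1) + 0.5f);  // nearest entry
    return palette[idx];
}

int main() {
    const float* c = palette_lookup(ouster::viz::spezia_cal_ref_palette,
                                    ouster::viz::spezia_cal_ref_n, 0.25f);
    std::printf("r=%.3f g=%.3f b=%.3f\n", c[0], c[1], c[2]);
    return 0;
}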
* - * @param n total cloud size - * @param w columns number + * @param[in] n total cloud size + * @param[in] w columns number * @return size_t hash of the pair (n, w) to use as a map key */ static inline size_t ti_key(size_t n, size_t w) { diff --git a/ouster_viz/src/colormaps.h b/ouster_viz/src/colormaps.h index 585dc63e..b88cc336 100644 --- a/ouster_viz/src/colormaps.h +++ b/ouster_viz/src/colormaps.h @@ -26,266 +26,528 @@ inline float** genPalette(const int n, const float from[3], const float to[3]) { // https://daniel.lawrence.lu/public/colortransform/#0_2423_964_352_6_2624_1000_513_11_3248_1000_617_15_415_1000_774 const size_t spezia_n = 256; const float spezia_palette[spezia_n][3] = { - {0.04890922165917825f, 0.34265700288230266f, 0.5139042200196196f}, - {0.04895672077739804f, 0.34399228711079705f, 0.5173325088859984f}, - {0.04899969158023907f, 0.34532432182766976f, 0.5207851330769154f}, - {0.049038068929181285f, 0.34665300013643424f, 0.5242624999557384f}, - {0.0490717860366443f, 0.3479782119131098f, 0.5277650273921529f}, - {0.04910077440233592f, 0.34929984367863964f, 0.5312931441090918f}, - {0.04912496374647964f, 0.35061777846523556f, 0.5348472900437968f}, - {0.049144281939685876f, 0.35193189567631167f, 0.5384279167237124f}, - {0.04915865492929047f, 0.3532420709396423f, 0.5420354876579142f}, - {0.04916800666192803f, 0.3545481759533582f, 0.5456704787448663f}, - {0.04917225900211732f, 0.3558500783243678f, 0.5493333786972924f}, - {0.04917133164659893f, 0.35714764139876426f, 0.553024689485032f}, - {0.0491651420341628f, 0.35844072408375016f, 0.5567449267967906f}, - {0.049153605250673076f, 0.35972918066057785f, 0.5604946205217287f}, - {0.04913663392897654f, 0.36101286058797066f, 0.5642743152519267f}, - {0.04911413814335756f, 0.36229160829545354f, 0.5680845708067875f}, - {0.04908602529819959f, 0.36356526296598163f, 0.5719259627805287f}, - {0.04905220001042406f, 0.36483365830721187f, 0.5757990831139734f}, - {0.04901256398533129f, 0.36609662231071893f, 0.5797045406919258f}, - {0.04896701588534969f, 0.36735397699840217f, 0.5836429619674972f}, - {0.04891545119124254f, 0.36860553815528246f, 0.5876149916148347f}, - {0.04885776205520153f, 0.36985111504782353f, 0.5916212932117864f}, - {0.048793837145294165f, 0.371090510126853f, 0.5956625499541581f}, - {0.048723561480604215f, 0.37232351871408936f, 0.5997394654032839f}, - {0.04864681625641982f, 0.37354992867120285f, 0.6038527642687842f}, - {0.0485634786587359f, 0.37476952005026626f, 0.6080031932284756f}, - {0.04847342166723854f, 0.3759820647243526f, 0.6121915217875443f}, - {0.04837651384597603f, 0.37718732599695254f, 0.6164185431792271f}, - {0.04827261912068898f, 0.3783850581887729f, 0.6206850753093874f}, - {0.04816159654185025f, 0.37957500620037093f, 0.6249919617475522f}, - {0.04804330003224206f, 0.38075690504895116f, 0.6293400727671268f}, - {0.047917578117875524f, 0.3819304793775204f, 0.633730306437712f}, - {0.04778427364089425f, 0.38309544293445374f, 0.6381635897726399f}, - {0.04764322345301101f, 0.38425149802135766f, 0.6426408799350484f}, - {0.04749425808786458f, 0.385398334906948f, 0.6471631655060938f}, - {0.04733720141054259f, 0.3865356312044689f, 0.6517314678190856f}, - {0.04717187024231324f, 0.3876630512099673f, 0.6563468423636755f}, - {0.046998073958454976f, 0.38878024519851034f, 0.6610103802644818f}, - {0.046815614056824016f, 0.3898868486751851f, 0.6657232098388559f}, - {0.04662428369457814f, 0.3909824815774357f, 0.6704864982388766f}, - {0.04642386719018477f, 0.39206674742499825f, 0.6753014531830023f}, - {0.04621413948754389f, 
0.39313923241335524f, 0.6801693247832367f}, - {0.045994865578738504f, 0.3941995044462622f, 0.6850914074741193f}, - {0.04576579988147745f, 0.39524711210249736f, 0.6900690420503143f}, - {0.04552668556693947f, 0.3962815835315315f, 0.6951036178201221f}, - {0.04527725383318241f, 0.39730242527232407f, 0.7001965748827989f}, - {0.04501722311872807f, 0.39830912098889804f, 0.7053494065382041f}, - {0.04474629825033485f, 0.39930113011574186f, 0.7105636618379779f}, - {0.044464169518219306f, 0.4002778864054065f, 0.7158409482881979f}, - {0.044170511671191286f, 0.4012387963699213f, 0.7211829347142875f}, - {0.04386498282321687f, 0.4021832376068135f, 0.7265913542998228f}, - {0.04354722326188234f, 0.4031105569995846f, 0.7320680078119023f}, - {0.04321685414797862f, 0.40402006878146585f, 0.7376147670267773f}, - {0.0428734760940282f, 0.40491105245010933f, 0.743233578370643f}, - {0.042516667607970175f, 0.40578275051957646f, 0.748926466791789f}, - {0.04214598338630927f, 0.4066343660945334f, 0.7546955398817109f}, - {0.04176095243886018f, 0.40746506024993384f, 0.7605429922643745f}, - {0.04136107602475044f, 0.40827394919762916f, 0.766471110274553f}, - {0.04094582537627162f, 0.4090601012192915f, 0.7724822769480404f}, - {0.04051463918382638f, 0.40982253334270374f, 0.7785789773486957f}, - {0.040061502782456945f, 0.4105602077358398f, 0.7847638042595603f}, - {0.03959294089889664f, 0.41127202779018696f, 0.7910394642679004f}, - {0.039109793546916495f, 0.4119568338613871f, 0.7974087842769024f}, - {0.03861172210191932f, 0.41261339863144436f, 0.803874718479878f}, - {0.0380983735795864f, 0.4132404220523802f, 0.8104403558364525f}, - {0.03756937968562651f, 0.4138365258262561f, 0.8171089280940507f}, - {0.03702435578736771f, 0.4144002473707861f, 0.8238838184024792f}, - {0.0364628997996382f, 0.4149300332132621f, 0.8307685705742502f}, - {0.03588459097638143f, 0.4154242317480496f, 0.8377668990487521f}, - {0.035288988598694025f, 0.4158810852842974f, 0.844882699624589f}, - {0.03467563054866628f, 0.4162987213006144f, 0.8521200610312002f}, - {0.03404403175731731f, 0.41667514281199364f, 0.8594832774186676f}, - {0.033393682513460185f, 0.41700821774098445f, 0.8669768618532854f}, - {0.03272404661867004f, 0.41729566716967786f, 0.8746055609162682f}, - {0.032034559371859575f, 0.4175350523310705f, 0.8823743705140761f}, - {0.03132462536474723f, 0.41772376017735885f, 0.8902885530212784f}, - {0.03059361606719027f, 0.417858987338036f, 0.8983536558911435f}, - {0.029840867178669222f, 0.41793772225168413f, 0.9065755318852089f}, - {0.02906567571902483f, 0.4179567252211435f, 0.9149603610913213f}, - {0.028267296828018075f, 0.41791250610119823f, 0.9235146749206897f}, - {0.027444940239127507f, 0.41780129927982523f, 0.9322453822980893f}, - {0.026597766388240202f, 0.4176190355565933f, 0.9411597982868389f}, - {0.02572488211232861f, 0.41736131045306674f, 0.9502656754213602f}, - {0.02482533588680886f, 0.41702334840740857f, 0.9595712380560552f}, - {0.023898112542860842f, 0.416599962205498f, 0.9690852200808441f}, - {0.02294212739712791f, 0.41608550687982504f, 0.9788169064013666f}, - {0.02195621971619119f, 0.4154738271597193f, 0.9887761786374855f}, - {0.03533572637548167f, 0.4150344767837667f, 0.9966419438918287f}, - {0.08206748636661013f, 0.4154760610454022f, 0.996875442497312f}, - {0.1131664468320158f, 0.4159292422424467f, 0.9971067037505105f}, - {0.1377759789309851f, 0.4163940123475041f, 0.9973357493609963f}, - {0.1586260932452447f, 0.4168703621191211f, 0.9975626007042689f}, - {0.17695881259992585f, 0.41735828111703227f, 0.997787278826484f}, - 
{0.19346029551091778f, 0.4178577577177723f, 0.9980098044491156f}, - {0.2085556849234767f, 0.4183687791306285f, 0.9982301979735458f}, - {0.22252938052310162f, 0.41889133141394447f, 0.9984484794855942f}, - {0.2355824089832244f, 0.4194253994917421f, 0.9986646687599702f}, - {0.24786290560296725f, 0.4199709671706614f, 0.9988787852646682f}, - {0.25948364869956886f, 0.42052801715720073f, 0.9990908481652964f}, - {0.2705327829044692f, 0.42109653107524325f, 0.9993008763293371f}, - {0.2810807045979947f, 0.4216764894838623f, 0.9995088883303488f}, - {0.2911846624744039f, 0.4222678718953844f, 0.9997149024521047f}, - {0.30089193496804306f, 0.4228706567937021f, 0.9999189366926701f}, - {0.3199598560384707f, 0.4211529467871777f, 1.0000000000000044f}, - {0.3436114893370144f, 0.4178742172053897f, 1.0000000000000047f}, - {0.36539676089694495f, 0.41458308629177515f, 1.0000000000000044f}, - {0.3856661632570949f, 0.41127775518053283f, 1.0000000000000042f}, - {0.404675301565696f, 0.407956362084171f, 1.0000000000000044f}, - {0.4226172861700883f, 0.4046169767859018f, 1.0000000000000047f}, - {0.43964219386021874f, 0.40125759469274436f, 1.0000000000000047f}, - {0.45586938841351193f, 0.3978761303980185f, 1.0000000000000047f}, - {0.47139565849043324f, 0.39447041069519134f, 1.0000000000000047f}, - {0.4863007849418988f, 0.3910381669772773f, 1.0000000000000047f}, - {0.5006514638539757f, 0.3875770269469873f, 1.0000000000000044f}, - {0.5145041416968924f, 0.3840845055522841f, 1.0000000000000047f}, - {0.5279071095300848f, 0.3805579950497078f, 1.0000000000000047f}, - {0.5409020797263486f, 0.3769947540834305f, 1.0000000000000044f}, - {0.5535253932438766f, 0.3733918956509583f, 1.0000000000000044f}, - {0.5658089579546876f, 0.3697463738064324f, 1.0000000000000042f}, - {0.577780987780821f, 0.366054968928604f, 1.0000000000000049f}, - {0.589466591997403f, 0.3623142713523205f, 1.0000000000000047f}, - {0.6008882502481963f, 0.35852066312849035f, 1.0000000000000044f}, - {0.6120661992793963f, 0.3546702976368881f, 1.0000000000000047f}, - {0.6230187506929341f, 0.35075907672718176f, 1.0000000000000047f}, - {0.6337625542333337f, 0.34678262500419443f, 1.0000000000000047f}, - {0.6443128176539651f, 0.3427362608011279f, 1.0000000000000044f}, - {0.6546834916623888f, 0.33861496329592544f, 1.0000000000000047f}, - {0.664887426552217f, 0.3344133351169368f, 1.0000000000000044f}, - {0.6749365057066918f, 0.3301255596489445f, 1.0000000000000047f}, - {0.6848417600790246f, 0.32574535208217403f, 1.0000000000000047f}, - {0.6946134669261637f, 0.32126590303548275f, 1.0000000000000049f}, - {0.7042612354316643f, 0.31667981331755896f, 1.0000000000000047f}, - {0.7137940813531695f, 0.3119790180493533f, 1.0000000000000049f}, - {0.7232204924365964f, 0.3071546979334297f, 1.0000000000000049f}, - {0.7325484860275505f, 0.30219717488892517f, 1.0000000000000047f}, - {0.7417856600618409f, 0.2970957885292609f, 1.000000000000005f}, - {0.7509392384175178f, 0.2918387489798506f, 1.0000000000000047f}, - {0.760016111449703f, 0.28641296022435003f, 1.0000000000000047f}, - {0.7690228723986646f, 0.2808038063993306f, 1.0000000000000049f}, - {0.7779658502549104f, 0.27499489103633235f, 1.0000000000000049f}, - {0.7868511395774846f, 0.2689677158905533f, 1.0000000000000047f}, - {0.7956846276897148f, 0.26270128126132847f, 1.0000000000000047f}, - {0.804472019617065f, 0.2561715829275765f, 1.0000000000000047f}, - {0.8132188610824966f, 0.2493509709254887f, 1.0000000000000047f}, - {0.8219305598337341f, 0.24220732066040862f, 1.0000000000000049f}, - {0.8306124055427538f, 0.23470294440057987f, 
1.0000000000000049f}, - {0.8392695884894237f, 0.2267931361345682f, 1.0000000000000047f}, - {0.847907217217596f, 0.21842418639150069f, 1.0000000000000047f}, - {0.8565303353323375f, 0.20953060994411976f, 1.0000000000000049f}, - {0.8651439375907393f, 0.20003116767718654f, 1.0000000000000049f}, - {0.8737529854254381f, 0.18982297245453064f, 1.0000000000000049f}, - {0.8823624220291222f, 0.17877241522237444f, 1.0000000000000047f}, - {0.8909771871196978f, 0.1667005280966983f, 1.0000000000000047f}, - {0.8996022314990386f, 0.15335795616479617f, 1.000000000000005f}, - {0.9082425315133318f, 0.13837882372526109f, 1.0000000000000049f}, - {0.9169031035195819f, 0.12118667725012405f, 1.0000000000000049f}, - {0.9255890184609986f, 0.10077304980525353f, 1.0000000000000047f}, - {0.9343054166534386f, 0.07504334998300113f, 1.0000000000000049f}, - {0.9430575228859241f, 0.03781952178921804f, 1.000000000000005f}, - {0.9509350420238839f, 1.4218570765223148e-13f, 0.9989984483716071f}, - {0.9554497353124459f, 1.4191675612451605e-13f, 0.9943640499109371f}, - {0.9599176427714787f, 1.4433731987395504e-13f, 0.9897799632511853f}, - {0.9643412154073002f, 1.4245465917994694e-13f, 0.9852425190239346f}, - {0.9687227616942858f, 1.4191675612451605e-13f, 0.9807481714229297f}, - {0.9730644583865243f, 1.411995520506082e-13f, 0.9762934885028384f}, - {0.9773683603724937f, 1.3931689135660008e-13f, 0.9718751430792824f}, - {0.9816364096714153f, 1.3886863881040766e-13f, 0.9674899041721569f}, - {0.9858704436584534f, 1.4039269746746187e-13f, 0.9631346289394122f}, - {0.9900722025959202f, 1.4397871783700112e-13f, 0.9588062550529955f}, - {0.9942433365389557f, 1.4155815408756212e-13f, 0.954501793472642f}, - {0.9983854116765075f, 1.3752388117183045e-13f, 0.9502183215767478f}, - {0.9999999999999819f, 0.02804423714351181f, 0.9437140548413381f}, - {0.9999999999999823f, 0.0675265531658979f, 0.9359017685954015f}, - {0.9999999999999826f, 0.09447578037166751f, 0.9282451825736049f}, - {0.9999999999999823f, 0.11567880450339993f, 0.920737795368809f}, - {0.9999999999999826f, 0.13352190503381375f, 0.9133734552831144f}, - {0.9999999999999823f, 0.1491028314594674f, 0.906146335428585f}, - {0.9999999999999826f, 0.16303259275115084f, 0.8990509109121838f}, - {0.9999999999999826f, 0.17569199214531872f, 0.8920819378992011f}, - {0.9999999999999826f, 0.18733702217610845f, 0.8852344343724449f}, - {0.9999999999999826f, 0.19814940356609517f, 0.8785036624245576f}, - {0.9999999999999823f, 0.20826355122506324f, 0.8718851119384158f}, - {0.9999999999999823f, 0.21778214249596284f, 0.8653744855260821f}, - {0.9999999999999826f, 0.22678566871532468f, 0.8589676846103573f}, - {0.9999999999999823f, 0.2353385863611125f, 0.8526607965450058f}, - {0.9999999999999828f, 0.24349343831907827f, 0.8464500826803465f}, - {0.9999999999999826f, 0.2512937077092952f, 0.840331967290248f}, - {0.9999999999999826f, 0.2587758499993201f, 0.8343030272849384f}, - {0.999999999999983f, 0.26739099502162367f, 0.8275538904243963f}, - {0.999999999999983f, 0.2793555475103376f, 0.8187524096848618f}, - {0.9999999999999828f, 0.29067538241472596f, 0.810154074771914f}, - {0.999999999999983f, 0.3014349177286362f, 0.8017491111724352f}, - {0.9999999999999826f, 0.31170258039783083f, 0.7935283442712853f}, - {0.9999999999999826f, 0.3215347049761315f, 0.7854831467895685f}, - {0.9999999999999826f, 0.3309782925632311f, 0.7776053911816436f}, - {0.9999999999999826f, 0.3400730122474594f, 0.7698874064041857f}, - {0.9999999999999826f, 0.34885268450644075f, 0.7623219385454285f}, - {0.999999999999983f, 0.35734640143399626f, 
0.7549021148665397f}, - {0.9999999999999826f, 0.3655793867737775f, 0.7476214108616114f}, - {0.9999999999999826f, 0.3735736659274856f, 0.7404736199894286f}, - {0.9999999999999828f, 0.381348594792351f, 0.7334528257702123f}, - {0.9999999999999826f, 0.38892128210540905f, 0.7265533759748873f}, - {0.9999999999999823f, 0.3963069303390571f, 0.7197698586639263f}, - {0.9999999999999823f, 0.4035191135203492f, 0.7130970798581467f}, - {0.9999999999999823f, 0.410570005644612f, 0.7065300426455539f}, - {0.9999999999999821f, 0.4174705699878856f, 0.700063927546916f}, - {0.9999999999999819f, 0.4242307171780247f, 0.6936940739785828f}, - {0.9999999999999821f, 0.4308594380852102f, 0.6874159626644994f}, - {0.9999999999999821f, 0.4373649162525338f, 0.6812251988606219f}, - {0.9999999999999819f, 0.44375462357781925f, 0.6751174962642902f}, - {0.9999999999999819f, 0.4500354021895003f, 0.6690886614886871f}, - {0.9999999999999821f, 0.45621353486890187f, 0.6631345789884755f}, - {0.9999999999999817f, 0.4622948059133914f, 0.657251196327135f}, - {0.9999999999999817f, 0.4682845539768576f, 0.6514345096795133f}, - {0.9999999999999817f, 0.474187718141824f, 0.645680549464667f}, - {0.9999999999999817f, 0.4800088782535285f, 0.6399853660042518f}, - {0.9999999999999815f, 0.4857522903672667f, 0.6343450151004509f}, - {0.9999999999999815f, 0.4914219180162633f, 0.6287555434246979f}, - {0.9999999999999815f, 0.497021459890778f, 0.6232129736041581f}, - {0.9999999999999815f, 0.5025543744242497f, 0.6177132888869281f}, - {0.9999999999999815f, 0.5080239017046412f, 0.6122524172590773f}, - {0.999999999999981f, 0.5134330830652836f, 0.606826214876734f}, - {0.9999999999999808f, 0.518784778656747f, 0.6014304486641499f}, - {0.9999999999999808f, 0.5240816832574693f, 0.5960607779137368f}, - {0.9999999999999806f, 0.5293263405443853f, 0.5907127347060119f}, - {0.9999999999999806f, 0.5345211560142691f, 0.5853817029456958f}, - {0.9999999999999808f, 0.5396684087209026f, 0.580062895784249f}, - {0.9999999999999808f, 0.5447702619716198f, 0.5747513311680923f}, - {0.9999999999999806f, 0.5498287731085955f, 0.5694418052146554f}, - {0.9999999999999803f, 0.5548459024848833f, 0.5641288630740176f}, - {0.9999999999999801f, 0.5598235217321937f, 0.5588067668806895f}, - {0.9999999999999799f, 0.5647634214064047f, 0.5534694603362047f}, - {0.9999999999999799f, 0.569667318087479f, 0.5481105293861371f}, - {0.9999999999999801f, 0.5745368610026079f, 0.5427231583620321f}, - {0.9999999999999797f, 0.5793736382348097f, 0.5373000808456486f}, - {0.9999999999999797f, 0.5841791825736894f, 0.5318335243749407f}, - {0.9999999999999797f, 0.58895497706055f, 0.5263151479421893f}, - {0.9999999999999795f, 0.5937024602763533f, 0.5207359710263567f}, - {0.9999999999999795f, 0.5984230314181602f, 0.5150862926436902f}, - {0.9999999999999792f, 0.6031180552074987f, 0.5093555985787912f}, - {0.9999999999999792f, 0.607788866672662f, 0.5035324545546109f}, - {0.999999999999979f, 0.6124367758461117f, 0.4976043825895365f}, - {0.999999999999979f, 0.6170630724180334f, 0.4915577171399405f}, - {0.9999999999999788f, 0.6216690303876014f, 0.48537743679248463f}, - {0.9999999999999788f, 0.6262559127547657f, 0.4790469661903673f}, - {0.9999999999999784f, 0.6308249762973255f, 0.4725479414659382f}, - {0.9999999999999786f, 0.6353774764808859f, 0.46585993058805514f}, - {0.9999999999999784f, 0.6399146725529954f, 0.45896009754439654f}, - {0.9999999999999784f, 0.644437832877538f, 0.45182279591800384f}, - {0.9999999999999781f, 0.6489482405714118f, 0.4444190728188997f}, - {0.9999999999999779f, 0.6534471995128909f, 
0.4367160577509657f}, - {0.9999999999999779f, 0.6579360408000906f, 0.4286762020035964f}, - {0.9999999999999779f, 0.6624161297489367f, 0.42025632127341656f}, - {0.9999999999999777f, 0.6668888735333959f, 0.41140637540952824f}, - {0.9999999999999777f, 0.6713557295869282f, 0.40206789113388525f}, - {0.9999999999999775f, 0.6758182149038043f, 0.3921718908087272f}}; + {0.0489092208, 0.3426569998, 0.5139042139}, + {0.048956722, 0.3439922929, 0.5173324943}, + {0.0489996932, 0.3453243077, 0.5207851529}, + {0.0490380675, 0.3466530144, 0.5242624879}, + {0.0490717851, 0.3479782045, 0.5277650356}, + {0.0491007753, 0.3492998481, 0.5312931538}, + {0.0491249636, 0.3506177664, 0.5348473191}, + {0.0491442829, 0.3519318998, 0.5384278893}, + {0.0491586551, 0.3532420695, 0.5420354605}, + {0.0491680056, 0.3545481861, 0.5456704497}, + {0.0491722599, 0.3558500707, 0.5493333936}, + {0.0491713323, 0.357147634, 0.5530247092}, + {0.0491651408, 0.358440727, 0.5567449331}, + {0.0491536036, 0.3597291708, 0.5604946017}, + {0.0491366349, 0.3610128462, 0.5642743111}, + {0.0491141379, 0.3622916043, 0.5680845976}, + {0.0490860268, 0.3635652661, 0.5719259381}, + {0.0490522012, 0.364833653, 0.5757991076}, + {0.0490125641, 0.3660966158, 0.5797045231}, + {0.048967015, 0.3673539758, 0.5836429596}, + {0.0489154495, 0.3686055243, 0.5876150131}, + {0.0488577634, 0.3698511124, 0.5916212797}, + {0.0487938374, 0.3710905015, 0.5956625342}, + {0.0487235598, 0.372323513, 0.5997394919}, + {0.0486468151, 0.3735499382, 0.6038527489}, + {0.0485634804, 0.3747695088, 0.6080031991}, + {0.0484734215, 0.3759820759, 0.6121914983}, + {0.0483765155, 0.3771873116, 0.6164185405}, + {0.0482726209, 0.378385067, 0.6206851006}, + {0.0481615961, 0.3795750141, 0.6249919534}, + {0.0480432995, 0.3807569146, 0.6293400526}, + {0.0479175784, 0.3819304705, 0.6337302923}, + {0.0477842726, 0.3830954432, 0.6381635666}, + {0.0476432219, 0.3842515051, 0.6426408887}, + {0.0474942587, 0.3853983283, 0.6471631527}, + {0.0473372005, 0.3865356445, 0.6517314911}, + {0.0471718684, 0.3876630366, 0.6563468575}, + {0.0469980724, 0.3887802362, 0.6610103846}, + {0.0468156151, 0.3898868561, 0.6657232046}, + {0.0466242842, 0.3909824789, 0.6704865098}, + {0.0464238673, 0.392066747, 0.6753014326}, + {0.046214141, 0.3931392431, 0.6801693439}, + {0.0459948666, 0.3941994905, 0.6850914359}, + {0.0457657985, 0.3952471018, 0.6900690198}, + {0.0455266871, 0.3962815702, 0.6951036453}, + {0.0452772528, 0.3973024189, 0.7001965642}, + {0.0450172238, 0.3983091116, 0.7053493857}, + {0.0447462983, 0.3993011415, 0.7105636597}, + {0.0444641709, 0.4002778828, 0.7158409357}, + {0.04417051, 0.4012387991, 0.7211829424}, + {0.0438649841, 0.4021832347, 0.7265913486}, + {0.0435472243, 0.4031105638, 0.7320680022}, + {0.0432168543, 0.404020071, 0.7376147509}, + {0.0428734757, 0.4049110413, 0.7432335615}, + {0.0425166674, 0.4057827592, 0.7489264607}, + {0.0421459824, 0.4066343606, 0.7546955347}, + {0.0417609513, 0.4074650705, 0.7605429888}, + {0.0413610749, 0.4082739353, 0.7664710879}, + {0.0409458242, 0.4090600908, 0.772482276}, + {0.0405146405, 0.4098225236, 0.7785789967}, + {0.0400615036, 0.4105602205, 0.784763813}, + {0.0395929404, 0.4112720191, 0.7910394669}, + {0.0391097926, 0.4119568467, 0.7974087596}, + {0.0386117212, 0.4126133919, 0.8038747311}, + {0.0380983725, 0.4132404327, 0.8104403615}, + {0.0375693813, 0.4138365388, 0.8171089292}, + {0.0370243564, 0.4144002497, 0.8238838315}, + {0.0364628993, 0.4149300456, 0.8307685852}, + {0.0358845927, 0.4154242277, 0.8377668858}, + {0.0352889895, 0.4158810973, 
0.8448827267}, + {0.0346756317, 0.4162987173, 0.8521200418}, + {0.0340440311, 0.4166751504, 0.8594833016}, + {0.033393681, 0.4170082211, 0.8669768572}, + {0.0327240452, 0.4172956645, 0.8746055365}, + {0.032034561, 0.4175350666, 0.8823743463}, + {0.031324625, 0.4177237749, 0.8902885318}, + {0.0305936169, 0.417858988, 0.8983536363}, + {0.029840868, 0.4179377258, 0.9065755606}, + {0.029065676, 0.4179567397, 0.9149603844}, + {0.028267296, 0.417912513, 0.9235146642}, + {0.0274449401, 0.4178012908, 0.9322453737}, + {0.0265977662, 0.4176190495, 0.9411597848}, + {0.0257248823, 0.4173613191, 0.950265646}, + {0.0248253364, 0.4170233607, 0.9595712423}, + {0.0238981117, 0.4165999591, 0.9690852165}, + {0.0229421277, 0.4160855114, 0.9788169265}, + {0.0219562203, 0.4154738188, 0.988776207}, + {0.035335727, 0.4150344729, 0.9966419339}, + {0.0820674896, 0.415476054, 0.9968754649}, + {0.113166444, 0.4159292281, 0.9971067309}, + {0.1377759725, 0.4163940251, 0.997335732}, + {0.1586260945, 0.4168703556, 0.9975625873}, + {0.1769588143, 0.4173582792, 0.9977872968}, + {0.1934603006, 0.4178577662, 0.9980098009}, + {0.2085556835, 0.4183687866, 0.9982302189}, + {0.2225293815, 0.4188913405, 0.9984484911}, + {0.2355824113, 0.4194253981, 0.9986646771}, + {0.2478629053, 0.4199709594, 0.998878777}, + {0.2594836354, 0.4205280244, 0.9990908504}, + {0.2705327868, 0.4210965335, 0.9993008971}, + {0.281080693, 0.4216764867, 0.9995089173}, + {0.2911846638, 0.422267884, 0.999714911}, + {0.3008919358, 0.4228706658, 0.9999189377}, + {0.3199598491, 0.4211529493, 1.0}, + {0.3436114788, 0.417874217, 1.0}, + {0.3653967679, 0.414583087, 1.0}, + {0.3856661618, 0.4112777412, 1.0}, + {0.4046753049, 0.4079563618, 1.0}, + {0.4226172864, 0.4046169817, 1.0}, + {0.4396421909, 0.4012576044, 1.0}, + {0.4558693767, 0.3978761435, 1.0}, + {0.4713956714, 0.3944704235, 1.0}, + {0.4863007963, 0.3910381794, 1.0}, + {0.5006514788, 0.3875770271, 1.0}, + {0.5145041347, 0.3840844929, 1.0}, + {0.5279071331, 0.3805579841, 1.0}, + {0.5409020782, 0.3769947588, 1.0}, + {0.5535253882, 0.3733918965, 1.0}, + {0.5658089519, 0.369746387, 1.0}, + {0.577780962, 0.3660549819, 1.0}, + {0.5894665718, 0.3623142838, 1.0}, + {0.6008882523, 0.3585206568, 1.0}, + {0.6120662093, 0.3546702862, 1.0}, + {0.6230187416, 0.350759089, 1.0}, + {0.6337625384, 0.3467826247, 1.0}, + {0.644312799, 0.342736274, 1.0}, + {0.6546834707, 0.3386149704, 1.0}, + {0.6648874283, 0.3344133496, 1.0}, + {0.674936533, 0.3301255703, 1.0}, + {0.6848417521, 0.3257453442, 1.0}, + {0.6946134567, 0.3212659061, 1.0}, + {0.7042612433, 0.3166798055, 1.0}, + {0.7137940526, 0.3119790256, 1.0}, + {0.7232204676, 0.3071546853, 1.0}, + {0.7325484753, 0.3021971881, 1.0}, + {0.7417856455, 0.2970957756, 1.0}, + {0.75093925, 0.2918387353, 1.0}, + {0.7600160837, 0.2864129543, 1.0}, + {0.769022882, 0.2808037996, 1.0}, + {0.7779658437, 0.27499488, 1.0}, + {0.7868511677, 0.2689677179, 1.0}, + {0.7956846356, 0.262701273, 1.0}, + {0.8044720292, 0.2561715841, 1.0}, + {0.813218832, 0.249350965, 1.0}, + {0.8219305873, 0.2422073185, 1.0}, + {0.830612421, 0.2347029448, 1.0}, + {0.8392695785, 0.2267931402, 1.0}, + {0.8479072452, 0.2184241861, 1.0}, + {0.8565303087, 0.2095306069, 1.0}, + {0.8651439548, 0.2000311613, 1.0}, + {0.8737530112, 0.1898229718, 1.0}, + {0.8823624253, 0.1787724197, 1.0}, + {0.8909772038, 0.1667005271, 1.0}, + {0.8996022344, 0.1533579528, 1.0}, + {0.9082425237, 0.1383788288, 1.0}, + {0.9169030786, 0.1211866736, 1.0}, + {0.925589025, 0.1007730514, 1.0}, + {0.9343054295, 0.0750433505, 1.0}, + {0.9430575371, 
0.0378195234, 1.0}, + {0.9509350657, 0.0, 0.9989984632}, + {0.95544976, 0.0, 0.9943640232}, + {0.9599176645, 0.0, 0.9897799492}, + {0.9643412232, 0.0, 0.9852425456}, + {0.9687227607, 0.0, 0.9807481766}, + {0.9730644822, 0.0, 0.9762935042}, + {0.9773683548, 0.0, 0.9718751311}, + {0.981636405, 0.0, 0.9674898982}, + {0.9858704209, 0.0, 0.9631346464}, + {0.9900721908, 0.0, 0.9588062763}, + {0.9942433238, 0.0, 0.9545018077}, + {0.9983854294, 0.0, 0.9502183199}, + {1.0, 0.0280442368, 0.9437140822}, + {1.0, 0.0675265566, 0.9359017611}, + {1.0, 0.0944757834, 0.9282451868}, + {1.0, 0.1156788021, 0.920737803}, + {1.0, 0.1335218996, 0.9133734703}, + {1.0, 0.1491028368, 0.9061463475}, + {1.0, 0.1630325913, 0.8990508914}, + {1.0, 0.175691992, 0.8920819163}, + {1.0, 0.187337026, 0.8852344155}, + {1.0, 0.198149398, 0.8785036802}, + {1.0, 0.2082635462, 0.8718851209}, + {1.0, 0.2177821398, 0.8653745055}, + {1.0, 0.2267856747, 0.8589676619}, + {1.0, 0.2353385836, 0.8526607752}, + {1.0, 0.2434934378, 0.8464500904}, + {1.0, 0.2512937188, 0.8403319716}, + {1.0, 0.2587758601, 0.8343030214}, + {1.0, 0.2673909962, 0.8275538683}, + {1.0, 0.2793555558, 0.818752408}, + {1.0, 0.2906753719, 0.8101540804}, + {1.0, 0.3014349043, 0.8017491102}, + {1.0, 0.3117025793, 0.7935283184}, + {1.0, 0.3215346932, 0.7854831219}, + {1.0, 0.3309783041, 0.7776054144}, + {1.0, 0.3400730193, 0.7698873878}, + {1.0, 0.348852694, 0.762321949}, + {1.0, 0.3573464155, 0.7549021244}, + {1.0, 0.3655793965, 0.747621417}, + {1.0, 0.3735736609, 0.740473628}, + {1.0, 0.3813485801, 0.7334527969}, + {1.0, 0.3889212906, 0.7265533805}, + {1.0, 0.396306932, 0.7197698355}, + {1.0, 0.4035191238, 0.7130970955}, + {1.0, 0.4105699956, 0.7065300345}, + {1.0, 0.4174705744, 0.7000639439}, + {1.0, 0.4242307246, 0.6936940551}, + {1.0, 0.4308594465, 0.6874159575}, + {1.0, 0.4373649061, 0.6812251806}, + {1.0, 0.4437546134, 0.6751174927}, + {1.0, 0.4500353932, 0.6690886617}, + {1.0, 0.4562135339, 0.6631345749}, + {1.0, 0.462294817, 0.6572511792}, + {1.0, 0.4682845473, 0.6514344811}, + {1.0, 0.4741877317, 0.6456805468}, + {1.0, 0.4800088704, 0.6399853826}, + {1.0, 0.4857522845, 0.634344995}, + {1.0, 0.4914219081, 0.6287555695}, + {1.0, 0.4970214665, 0.6232129931}, + {1.0, 0.5025543571, 0.6177132726}, + {1.0, 0.5080239177, 0.6122524142}, + {1.0, 0.5134330988, 0.6068261862}, + {1.0, 0.5187847614, 0.6014304757}, + {1.0, 0.524081707, 0.5960607529}, + {1.0, 0.5293263197, 0.5907127261}, + {1.0, 0.5345211625, 0.5853816867}, + {1.0, 0.5396683812, 0.5800628662}, + {1.0, 0.5447702408, 0.5747513175}, + {1.0, 0.5498287678, 0.5694417953}, + {1.0, 0.5548459291, 0.5641288757}, + {1.0, 0.559823513, 0.558806777}, + {1.0, 0.5647634268, 0.5534694791}, + {1.0, 0.5696673393, 0.5481105447}, + {1.0, 0.57453686, 0.5427231789}, + {1.0, 0.5793736577, 0.5373001099}, + {1.0, 0.584179163, 0.5318335295}, + {1.0, 0.5889549851, 0.5263151526}, + {1.0, 0.5937024355, 0.5207359791}, + {1.0, 0.5984230042, 0.5150862932}, + {1.0, 0.603118062, 0.5093556046}, + {1.0, 0.6077888608, 0.5035324693}, + {1.0, 0.6124367714, 0.4976043701}, + {1.0, 0.6170630455, 0.4915577173}, + {1.0, 0.621669054, 0.4853774309}, + {1.0, 0.6262559295, 0.4790469706}, + {1.0, 0.6308249831, 0.4725479484}, + {1.0, 0.6353774667, 0.4658599198}, + {1.0, 0.6399146914, 0.4589600861}, + {1.0, 0.6444378495, 0.4518227875}, + {1.0, 0.6489482522, 0.444419086}, + {1.0, 0.6534472108, 0.4367160499}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 
0.6758182049, 0.3921718895}, +}; + + +const size_t spezia_cal_ref_n = 256; +const float spezia_cal_ref_palette[spezia_cal_ref_n][3] = { + {0.0489092208, 0.3426569998, 0.5139042139}, + {0.0490717851, 0.3479782045, 0.5277650356}, + {0.0491680056, 0.3545481861, 0.5456704497}, + {0.0491366349, 0.3610128462, 0.5642743111}, + {0.048967015, 0.3673539758, 0.5836429596}, + {0.0486468151, 0.3735499382, 0.6038527489}, + {0.0482726209, 0.378385067, 0.6206851006}, + {0.0476432219, 0.3842515051, 0.6426408887}, + {0.0469980724, 0.3887802362, 0.6610103846}, + {0.0459948666, 0.3941994905, 0.6850914359}, + {0.0450172238, 0.3983091116, 0.7053493857}, + {0.0438649841, 0.4021832347, 0.7265913486}, + {0.0425166674, 0.4057827592, 0.7489264607}, + {0.0409458242, 0.4090600908, 0.772482276}, + {0.0391097926, 0.4119568467, 0.7974087596}, + {0.0370243564, 0.4144002497, 0.8238838315}, + {0.0346756317, 0.4162987173, 0.8521200418}, + {0.032034561, 0.4175350666, 0.8823743463}, + {0.029065676, 0.4179567397, 0.9149603844}, + {0.0257248823, 0.4173613191, 0.950265646}, + {0.0229421277, 0.4160855114, 0.9788169265}, + {0.113166444, 0.4159292281, 0.9971067309}, + {0.1769588143, 0.4173582792, 0.9977872968}, + {0.2355824113, 0.4194253981, 0.9986646771}, + {0.2705327868, 0.4210965335, 0.9993008971}, + {0.3199598491, 0.4211529493, 1.0}, + {0.3856661618, 0.4112777412, 1.0}, + {0.4396421909, 0.4012576044, 1.0}, + {0.4863007963, 0.3910381794, 1.0}, + {0.5409020782, 0.3769947588, 1.0}, + {0.577780962, 0.3660549819, 1.0}, + {0.6120662093, 0.3546702862, 1.0}, + {0.644312799, 0.342736274, 1.0}, + {0.6648874283, 0.3344133496, 1.0}, + {0.6946134567, 0.3212659061, 1.0}, + {0.7232204676, 0.3071546853, 1.0}, + {0.75093925, 0.2918387353, 1.0}, + {0.7779658437, 0.27499488, 1.0}, + {0.7956846356, 0.262701273, 1.0}, + {0.8219305873, 0.2422073185, 1.0}, + {0.8392695785, 0.2267931402, 1.0}, + {0.8651439548, 0.2000311613, 1.0}, + {0.8823624253, 0.1787724197, 1.0}, + {0.9082425237, 0.1383788288, 1.0}, + {0.925589025, 0.1007730514, 1.0}, + {0.9509350657, 0.0, 0.9989984632}, + {0.9599176645, 0.0, 0.9897799492}, + {0.9687227607, 0.0, 0.9807481766}, + {0.9773683548, 0.0, 0.9718751311}, + {0.9858704209, 0.0, 0.9631346464}, + {0.9983854294, 0.0, 0.9502183199}, + {1.0, 0.0675265566, 0.9359017611}, + {1.0, 0.1156788021, 0.920737803}, + {1.0, 0.1491028368, 0.9061463475}, + {1.0, 0.175691992, 0.8920819163}, + {1.0, 0.198149398, 0.8785036802}, + {1.0, 0.2082635462, 0.8718851209}, + {1.0, 0.2267856747, 0.8589676619}, + {1.0, 0.2434934378, 0.8464500904}, + {1.0, 0.2587758601, 0.8343030214}, + {1.0, 0.2793555558, 0.818752408}, + {1.0, 0.2906753719, 0.8101540804}, + {1.0, 0.3117025793, 0.7935283184}, + {1.0, 0.3309783041, 0.7776054144}, + {1.0, 0.3400730193, 0.7698873878}, + {1.0, 0.3573464155, 0.7549021244}, + {1.0, 0.3655793965, 0.747621417}, + {1.0, 0.3813485801, 0.7334527969}, + {1.0, 0.3889212906, 0.7265533805}, + {1.0, 0.4035191238, 0.7130970955}, + {1.0, 0.4105699956, 0.7065300345}, + {1.0, 0.4174705744, 0.7000639439}, + {1.0, 0.4308594465, 0.6874159575}, + {1.0, 0.4373649061, 0.6812251806}, + {1.0, 0.4437546134, 0.6751174927}, + {1.0, 0.4562135339, 0.6631345749}, + {1.0, 0.462294817, 0.6572511792}, + {1.0, 0.4682845473, 0.6514344811}, + {1.0, 0.4741877317, 0.6456805468}, + {1.0, 0.4800088704, 0.6399853826}, + {1.0, 0.4914219081, 0.6287555695}, + {1.0, 0.4970214665, 0.6232129931}, + {1.0, 0.5025543571, 0.6177132726}, + {1.0, 0.5080239177, 0.6122524142}, + {1.0, 0.5134330988, 0.6068261862}, + {1.0, 0.5187847614, 0.6014304757}, + {1.0, 0.524081707, 
0.5960607529}, + {1.0, 0.5293263197, 0.5907127261}, + {1.0, 0.5345211625, 0.5853816867}, + {1.0, 0.5396683812, 0.5800628662}, + {1.0, 0.5396683812, 0.5800628662}, + {1.0, 0.5447702408, 0.5747513175}, + {1.0, 0.5498287678, 0.5694417953}, + {1.0, 0.5548459291, 0.5641288757}, + {1.0, 0.559823513, 0.558806777}, + {1.0, 0.5647634268, 0.5534694791}, + {1.0, 0.5647634268, 0.5534694791}, + {1.0, 0.5696673393, 0.5481105447}, + {1.0, 0.57453686, 0.5427231789}, + {1.0, 0.5793736577, 0.5373001099}, + {1.0, 0.5793736577, 0.5373001099}, + {1.0, 0.584179163, 0.5318335295}, + {1.0, 0.5889549851, 0.5263151526}, + {1.0, 0.5889549851, 0.5263151526}, + {1.0, 0.5937024355, 0.5207359791}, + {1.0, 0.5937024355, 0.5207359791}, + {1.0, 0.5984230042, 0.5150862932}, + {1.0, 0.603118062, 0.5093556046}, + {1.0, 0.603118062, 0.5093556046}, + {1.0, 0.6077888608, 0.5035324693}, + {1.0, 0.6077888608, 0.5035324693}, + {1.0, 0.6124367714, 0.4976043701}, + {1.0, 0.6124367714, 0.4976043701}, + {1.0, 0.6170630455, 0.4915577173}, + {1.0, 0.6170630455, 0.4915577173}, + {1.0, 0.621669054, 0.4853774309}, + {1.0, 0.621669054, 0.4853774309}, + {1.0, 0.6262559295, 0.4790469706}, + {1.0, 0.6262559295, 0.4790469706}, + {1.0, 0.6262559295, 0.4790469706}, + {1.0, 0.6308249831, 0.4725479484}, + {1.0, 0.6308249831, 0.4725479484}, + {1.0, 0.6353774667, 0.4658599198}, + {1.0, 0.6353774667, 0.4658599198}, + {1.0, 0.6353774667, 0.4658599198}, + {1.0, 0.6399146914, 0.4589600861}, + {1.0, 0.6399146914, 0.4589600861}, + {1.0, 0.6399146914, 0.4589600861}, + {1.0, 0.6444378495, 0.4518227875}, + {1.0, 0.6444378495, 0.4518227875}, + {1.0, 0.6444378495, 0.4518227875}, + {1.0, 0.6489482522, 0.444419086}, + {1.0, 0.6489482522, 0.444419086}, + {1.0, 0.6489482522, 0.444419086}, + {1.0, 0.6489482522, 0.444419086}, + {1.0, 0.6534472108, 0.4367160499}, + {1.0, 0.6534472108, 0.4367160499}, + {1.0, 0.6534472108, 0.4367160499}, + {1.0, 0.6534472108, 0.4367160499}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6579360366, 0.428676188}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6624161005, 0.4202563167}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6668888927, 0.411406368}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6713557243, 0.4020678997}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 
0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, + {1.0, 0.6758182049, 0.3921718895}, +}; const size_t calref_n = 256; const float calref_palette[calref_n][3] = { - {0.1f, 0.1f, 0.1f}, + {0.37562745098039246f, 0.30080392156862746f, 0.6312941176470588f}, {0.36862745098039246f, 0.30980392156862746f, 0.6352941176470588f}, {0.3618608227604765f, 0.31856978085351784f, 0.6394463667820068f}, {0.3550941945405613f, 0.3273356401384083f, 0.643598615916955f}, @@ -543,262 +805,526 @@ const float calref_palette[calref_n][3] = { {0.6280661284121491f, 0.013302575932333749f, 0.26082276047673913f}}; const size_t grey_n = 256; -const float grey_palette[grey_n][3] = {{0.0f, 0.0f, 0.0f}, - {0.00392157f, 0.00392157f, 0.00392157f}, - {0.00784314f, 0.00784314f, 0.00784314f}, - {0.0117647f, 0.0117647f, 0.0117647f}, - {0.0156863f, 0.0156863f, 0.0156863f}, - {0.0196078f, 0.0196078f, 0.0196078f}, - {0.0235294f, 0.0235294f, 0.0235294f}, - {0.027451f, 0.027451f, 
0.027451f}, - {0.0313726f, 0.0313726f, 0.0313726f}, - {0.0352941f, 0.0352941f, 0.0352941f}, - {0.0392157f, 0.0392157f, 0.0392157f}, - {0.0431373f, 0.0431373f, 0.0431373f}, - {0.0470588f, 0.0470588f, 0.0470588f}, - {0.0509804f, 0.0509804f, 0.0509804f}, - {0.054902f, 0.054902f, 0.054902f}, - {0.0588235f, 0.0588235f, 0.0588235f}, - {0.0627451f, 0.0627451f, 0.0627451f}, - {0.0666667f, 0.0666667f, 0.0666667f}, - {0.0705882f, 0.0705882f, 0.0705882f}, - {0.0745098f, 0.0745098f, 0.0745098f}, - {0.0784314f, 0.0784314f, 0.0784314f}, - {0.0823529f, 0.0823529f, 0.0823529f}, - {0.0862745f, 0.0862745f, 0.0862745f}, - {0.0901961f, 0.0901961f, 0.0901961f}, - {0.0941176f, 0.0941176f, 0.0941176f}, - {0.0980392f, 0.0980392f, 0.0980392f}, - {0.101961f, 0.101961f, 0.101961f}, - {0.105882f, 0.105882f, 0.105882f}, - {0.109804f, 0.109804f, 0.109804f}, - {0.113725f, 0.113725f, 0.113725f}, - {0.117647f, 0.117647f, 0.117647f}, - {0.121569f, 0.121569f, 0.121569f}, - {0.12549f, 0.12549f, 0.12549f}, - {0.129412f, 0.129412f, 0.129412f}, - {0.133333f, 0.133333f, 0.133333f}, - {0.137255f, 0.137255f, 0.137255f}, - {0.141176f, 0.141176f, 0.141176f}, - {0.145098f, 0.145098f, 0.145098f}, - {0.14902f, 0.14902f, 0.14902f}, - {0.152941f, 0.152941f, 0.152941f}, - {0.156863f, 0.156863f, 0.156863f}, - {0.160784f, 0.160784f, 0.160784f}, - {0.164706f, 0.164706f, 0.164706f}, - {0.168627f, 0.168627f, 0.168627f}, - {0.172549f, 0.172549f, 0.172549f}, - {0.176471f, 0.176471f, 0.176471f}, - {0.180392f, 0.180392f, 0.180392f}, - {0.184314f, 0.184314f, 0.184314f}, - {0.188235f, 0.188235f, 0.188235f}, - {0.192157f, 0.192157f, 0.192157f}, - {0.196078f, 0.196078f, 0.196078f}, - {0.2f, 0.2f, 0.2f}, - {0.203922f, 0.203922f, 0.203922f}, - {0.207843f, 0.207843f, 0.207843f}, - {0.211765f, 0.211765f, 0.211765f}, - {0.215686f, 0.215686f, 0.215686f}, - {0.219608f, 0.219608f, 0.219608f}, - {0.223529f, 0.223529f, 0.223529f}, - {0.227451f, 0.227451f, 0.227451f}, - {0.231373f, 0.231373f, 0.231373f}, - {0.235294f, 0.235294f, 0.235294f}, - {0.239216f, 0.239216f, 0.239216f}, - {0.243137f, 0.243137f, 0.243137f}, - {0.247059f, 0.247059f, 0.247059f}, - {0.25098f, 0.25098f, 0.25098f}, - {0.254902f, 0.254902f, 0.254902f}, - {0.258824f, 0.258824f, 0.258824f}, - {0.262745f, 0.262745f, 0.262745f}, - {0.266667f, 0.266667f, 0.266667f}, - {0.270588f, 0.270588f, 0.270588f}, - {0.27451f, 0.27451f, 0.27451f}, - {0.278431f, 0.278431f, 0.278431f}, - {0.282353f, 0.282353f, 0.282353f}, - {0.286275f, 0.286275f, 0.286275f}, - {0.290196f, 0.290196f, 0.290196f}, - {0.294118f, 0.294118f, 0.294118f}, - {0.298039f, 0.298039f, 0.298039f}, - {0.301961f, 0.301961f, 0.301961f}, - {0.305882f, 0.305882f, 0.305882f}, - {0.309804f, 0.309804f, 0.309804f}, - {0.313726f, 0.313726f, 0.313726f}, - {0.317647f, 0.317647f, 0.317647f}, - {0.321569f, 0.321569f, 0.321569f}, - {0.32549f, 0.32549f, 0.32549f}, - {0.329412f, 0.329412f, 0.329412f}, - {0.333333f, 0.333333f, 0.333333f}, - {0.337255f, 0.337255f, 0.337255f}, - {0.341176f, 0.341176f, 0.341176f}, - {0.345098f, 0.345098f, 0.345098f}, - {0.34902f, 0.34902f, 0.34902f}, - {0.352941f, 0.352941f, 0.352941f}, - {0.356863f, 0.356863f, 0.356863f}, - {0.360784f, 0.360784f, 0.360784f}, - {0.364706f, 0.364706f, 0.364706f}, - {0.368627f, 0.368627f, 0.368627f}, - {0.372549f, 0.372549f, 0.372549f}, - {0.376471f, 0.376471f, 0.376471f}, - {0.380392f, 0.380392f, 0.380392f}, - {0.384314f, 0.384314f, 0.384314f}, - {0.388235f, 0.388235f, 0.388235f}, - {0.392157f, 0.392157f, 0.392157f}, - {0.396078f, 0.396078f, 0.396078f}, - {0.4f, 0.4f, 0.4f}, - {0.403922f, 0.403922f, 
0.403922f}, - {0.407843f, 0.407843f, 0.407843f}, - {0.411765f, 0.411765f, 0.411765f}, - {0.415686f, 0.415686f, 0.415686f}, - {0.419608f, 0.419608f, 0.419608f}, - {0.423529f, 0.423529f, 0.423529f}, - {0.427451f, 0.427451f, 0.427451f}, - {0.431373f, 0.431373f, 0.431373f}, - {0.435294f, 0.435294f, 0.435294f}, - {0.439216f, 0.439216f, 0.439216f}, - {0.443137f, 0.443137f, 0.443137f}, - {0.447059f, 0.447059f, 0.447059f}, - {0.45098f, 0.45098f, 0.45098f}, - {0.454902f, 0.454902f, 0.454902f}, - {0.458824f, 0.458824f, 0.458824f}, - {0.462745f, 0.462745f, 0.462745f}, - {0.466667f, 0.466667f, 0.466667f}, - {0.470588f, 0.470588f, 0.470588f}, - {0.47451f, 0.47451f, 0.47451f}, - {0.478431f, 0.478431f, 0.478431f}, - {0.482353f, 0.482353f, 0.482353f}, - {0.486275f, 0.486275f, 0.486275f}, - {0.490196f, 0.490196f, 0.490196f}, - {0.494118f, 0.494118f, 0.494118f}, - {0.498039f, 0.498039f, 0.498039f}, - {0.501961f, 0.501961f, 0.501961f}, - {0.505882f, 0.505882f, 0.505882f}, - {0.509804f, 0.509804f, 0.509804f}, - {0.513726f, 0.513726f, 0.513726f}, - {0.517647f, 0.517647f, 0.517647f}, - {0.521569f, 0.521569f, 0.521569f}, - {0.52549f, 0.52549f, 0.52549f}, - {0.529412f, 0.529412f, 0.529412f}, - {0.533333f, 0.533333f, 0.533333f}, - {0.537255f, 0.537255f, 0.537255f}, - {0.541176f, 0.541176f, 0.541176f}, - {0.545098f, 0.545098f, 0.545098f}, - {0.54902f, 0.54902f, 0.54902f}, - {0.552941f, 0.552941f, 0.552941f}, - {0.556863f, 0.556863f, 0.556863f}, - {0.560784f, 0.560784f, 0.560784f}, - {0.564706f, 0.564706f, 0.564706f}, - {0.568627f, 0.568627f, 0.568627f}, - {0.572549f, 0.572549f, 0.572549f}, - {0.576471f, 0.576471f, 0.576471f}, - {0.580392f, 0.580392f, 0.580392f}, - {0.584314f, 0.584314f, 0.584314f}, - {0.588235f, 0.588235f, 0.588235f}, - {0.592157f, 0.592157f, 0.592157f}, - {0.596078f, 0.596078f, 0.596078f}, - {0.6f, 0.6f, 0.6f}, - {0.603922f, 0.603922f, 0.603922f}, - {0.607843f, 0.607843f, 0.607843f}, - {0.611765f, 0.611765f, 0.611765f}, - {0.615686f, 0.615686f, 0.615686f}, - {0.619608f, 0.619608f, 0.619608f}, - {0.623529f, 0.623529f, 0.623529f}, - {0.627451f, 0.627451f, 0.627451f}, - {0.631373f, 0.631373f, 0.631373f}, - {0.635294f, 0.635294f, 0.635294f}, - {0.639216f, 0.639216f, 0.639216f}, - {0.643137f, 0.643137f, 0.643137f}, - {0.647059f, 0.647059f, 0.647059f}, - {0.65098f, 0.65098f, 0.65098f}, - {0.654902f, 0.654902f, 0.654902f}, - {0.658824f, 0.658824f, 0.658824f}, - {0.662745f, 0.662745f, 0.662745f}, - {0.666667f, 0.666667f, 0.666667f}, - {0.670588f, 0.670588f, 0.670588f}, - {0.67451f, 0.67451f, 0.67451f}, - {0.678431f, 0.678431f, 0.678431f}, - {0.682353f, 0.682353f, 0.682353f}, - {0.686275f, 0.686275f, 0.686275f}, - {0.690196f, 0.690196f, 0.690196f}, - {0.694118f, 0.694118f, 0.694118f}, - {0.698039f, 0.698039f, 0.698039f}, - {0.701961f, 0.701961f, 0.701961f}, - {0.705882f, 0.705882f, 0.705882f}, - {0.709804f, 0.709804f, 0.709804f}, - {0.713726f, 0.713726f, 0.713726f}, - {0.717647f, 0.717647f, 0.717647f}, - {0.721569f, 0.721569f, 0.721569f}, - {0.72549f, 0.72549f, 0.72549f}, - {0.729412f, 0.729412f, 0.729412f}, - {0.733333f, 0.733333f, 0.733333f}, - {0.737255f, 0.737255f, 0.737255f}, - {0.741176f, 0.741176f, 0.741176f}, - {0.745098f, 0.745098f, 0.745098f}, - {0.74902f, 0.74902f, 0.74902f}, - {0.752941f, 0.752941f, 0.752941f}, - {0.756863f, 0.756863f, 0.756863f}, - {0.760784f, 0.760784f, 0.760784f}, - {0.764706f, 0.764706f, 0.764706f}, - {0.768627f, 0.768627f, 0.768627f}, - {0.772549f, 0.772549f, 0.772549f}, - {0.776471f, 0.776471f, 0.776471f}, - {0.780392f, 0.780392f, 0.780392f}, - {0.784314f, 0.784314f, 
0.784314f}, - {0.788235f, 0.788235f, 0.788235f}, - {0.792157f, 0.792157f, 0.792157f}, - {0.796078f, 0.796078f, 0.796078f}, - {0.8f, 0.8f, 0.8f}, - {0.803922f, 0.803922f, 0.803922f}, - {0.807843f, 0.807843f, 0.807843f}, - {0.811765f, 0.811765f, 0.811765f}, - {0.815686f, 0.815686f, 0.815686f}, - {0.819608f, 0.819608f, 0.819608f}, - {0.823529f, 0.823529f, 0.823529f}, - {0.827451f, 0.827451f, 0.827451f}, - {0.831373f, 0.831373f, 0.831373f}, - {0.835294f, 0.835294f, 0.835294f}, - {0.839216f, 0.839216f, 0.839216f}, - {0.843137f, 0.843137f, 0.843137f}, - {0.847059f, 0.847059f, 0.847059f}, - {0.85098f, 0.85098f, 0.85098f}, - {0.854902f, 0.854902f, 0.854902f}, - {0.858824f, 0.858824f, 0.858824f}, - {0.862745f, 0.862745f, 0.862745f}, - {0.866667f, 0.866667f, 0.866667f}, - {0.870588f, 0.870588f, 0.870588f}, - {0.87451f, 0.87451f, 0.87451f}, - {0.878431f, 0.878431f, 0.878431f}, - {0.882353f, 0.882353f, 0.882353f}, - {0.886275f, 0.886275f, 0.886275f}, - {0.890196f, 0.890196f, 0.890196f}, - {0.894118f, 0.894118f, 0.894118f}, - {0.898039f, 0.898039f, 0.898039f}, - {0.901961f, 0.901961f, 0.901961f}, - {0.905882f, 0.905882f, 0.905882f}, - {0.909804f, 0.909804f, 0.909804f}, - {0.913725f, 0.913725f, 0.913725f}, - {0.917647f, 0.917647f, 0.917647f}, - {0.921569f, 0.921569f, 0.921569f}, - {0.92549f, 0.92549f, 0.92549f}, - {0.929412f, 0.929412f, 0.929412f}, - {0.933333f, 0.933333f, 0.933333f}, - {0.937255f, 0.937255f, 0.937255f}, - {0.941176f, 0.941176f, 0.941176f}, - {0.945098f, 0.945098f, 0.945098f}, - {0.94902f, 0.94902f, 0.94902f}, - {0.952941f, 0.952941f, 0.952941f}, - {0.956863f, 0.956863f, 0.956863f}, - {0.960784f, 0.960784f, 0.960784f}, - {0.964706f, 0.964706f, 0.964706f}, - {0.968627f, 0.968627f, 0.968627f}, - {0.972549f, 0.972549f, 0.972549f}, - {0.976471f, 0.976471f, 0.976471f}, - {0.980392f, 0.980392f, 0.980392f}, - {0.984314f, 0.984314f, 0.984314f}, - {0.988235f, 0.988235f, 0.988235f}, - {0.992157f, 0.992157f, 0.992157f}, - {0.996078f, 0.996078f, 0.996078f}, - {1.0f, 1.0f, 1.0f}}; +const float grey_palette[grey_n][3] = { + {0.2, 0.2, 0.2}, + {0.20392156862745098, 0.20392156862745098, 0.20392156862745098}, + {0.20392156862745098, 0.20392156862745098, 0.20392156862745098}, + {0.20784313725490194, 0.20784313725490194, 0.20784313725490194}, + {0.21176470588235294, 0.21176470588235294, 0.21176470588235294}, + {0.21568627450980393, 0.21568627450980393, 0.21568627450980393}, + {0.2196078431372549, 0.2196078431372549, 0.2196078431372549}, + {0.2196078431372549, 0.2196078431372549, 0.2196078431372549}, + {0.22352941176470587, 0.22352941176470587, 0.22352941176470587}, + {0.22745098039215686, 0.22745098039215686, 0.22745098039215686}, + {0.23137254901960785, 0.23137254901960785, 0.23137254901960785}, + {0.23529411764705882, 0.23529411764705882, 0.23529411764705882}, + {0.23529411764705882, 0.23529411764705882, 0.23529411764705882}, + {0.2392156862745098, 0.2392156862745098, 0.2392156862745098}, + {0.24313725490196078, 0.24313725490196078, 0.24313725490196078}, + {0.24705882352941178, 0.24705882352941178, 0.24705882352941178}, + {0.25098039215686274, 0.25098039215686274, 0.25098039215686274}, + {0.25098039215686274, 0.25098039215686274, 0.25098039215686274}, + {0.2549019607843137, 0.2549019607843137, 0.2549019607843137}, + {0.2588235294117647, 0.2588235294117647, 0.2588235294117647}, + {0.2627450980392157, 0.2627450980392157, 0.2627450980392157}, + {0.26666666666666666, 0.26666666666666666, 0.26666666666666666}, + {0.26666666666666666, 0.26666666666666666, 0.26666666666666666}, + {0.27058823529411763, 
0.27058823529411763, 0.27058823529411763}, + {0.27450980392156865, 0.27450980392156865, 0.27450980392156865}, + {0.2784313725490196, 0.2784313725490196, 0.2784313725490196}, + {0.2823529411764706, 0.2823529411764706, 0.2823529411764706}, + {0.2823529411764706, 0.2823529411764706, 0.2823529411764706}, + {0.28627450980392155, 0.28627450980392155, 0.28627450980392155}, + {0.2901960784313725, 0.2901960784313725, 0.2901960784313725}, + {0.29411764705882354, 0.29411764705882354, 0.29411764705882354}, + {0.2980392156862745, 0.2980392156862745, 0.2980392156862745}, + {0.2980392156862745, 0.2980392156862745, 0.2980392156862745}, + {0.30196078431372547, 0.30196078431372547, 0.30196078431372547}, + {0.3058823529411765, 0.3058823529411765, 0.3058823529411765}, + {0.30980392156862746, 0.30980392156862746, 0.30980392156862746}, + {0.3137254901960784, 0.3137254901960784, 0.3137254901960784}, + {0.3137254901960784, 0.3137254901960784, 0.3137254901960784}, + {0.3176470588235294, 0.3176470588235294, 0.3176470588235294}, + {0.32156862745098036, 0.32156862745098036, 0.32156862745098036}, + {0.3254901960784314, 0.3254901960784314, 0.3254901960784314}, + {0.32941176470588235, 0.32941176470588235, 0.32941176470588235}, + {0.32941176470588235, 0.32941176470588235, 0.32941176470588235}, + {0.3333333333333333, 0.3333333333333333, 0.3333333333333333}, + {0.33725490196078434, 0.33725490196078434, 0.33725490196078434}, + {0.3411764705882353, 0.3411764705882353, 0.3411764705882353}, + {0.34509803921568627, 0.34509803921568627, 0.34509803921568627}, + {0.34509803921568627, 0.34509803921568627, 0.34509803921568627}, + {0.34901960784313724, 0.34901960784313724, 0.34901960784313724}, + {0.3529411764705882, 0.3529411764705882, 0.3529411764705882}, + {0.3568627450980392, 0.3568627450980392, 0.3568627450980392}, + {0.3607843137254902, 0.3607843137254902, 0.3607843137254902}, + {0.3607843137254902, 0.3607843137254902, 0.3607843137254902}, + {0.36470588235294116, 0.36470588235294116, 0.36470588235294116}, + {0.3686274509803922, 0.3686274509803922, 0.3686274509803922}, + {0.37254901960784315, 0.37254901960784315, 0.37254901960784315}, + {0.3764705882352941, 0.3764705882352941, 0.3764705882352941}, + {0.3764705882352941, 0.3764705882352941, 0.3764705882352941}, + {0.3803921568627451, 0.3803921568627451, 0.3803921568627451}, + {0.38431372549019605, 0.38431372549019605, 0.38431372549019605}, + {0.38823529411764707, 0.38823529411764707, 0.38823529411764707}, + {0.39215686274509803, 0.39215686274509803, 0.39215686274509803}, + {0.39215686274509803, 0.39215686274509803, 0.39215686274509803}, + {0.396078431372549, 0.396078431372549, 0.396078431372549}, + {0.4, 0.4, 0.4}, + {0.403921568627451, 0.403921568627451, 0.403921568627451}, + {0.40784313725490196, 0.40784313725490196, 0.40784313725490196}, + {0.40784313725490196, 0.40784313725490196, 0.40784313725490196}, + {0.4117647058823529, 0.4117647058823529, 0.4117647058823529}, + {0.4156862745098039, 0.4156862745098039, 0.4156862745098039}, + {0.4196078431372549, 0.4196078431372549, 0.4196078431372549}, + {0.4235294117647059, 0.4235294117647059, 0.4235294117647059}, + {0.4235294117647059, 0.4235294117647059, 0.4235294117647059}, + {0.42745098039215684, 0.42745098039215684, 0.42745098039215684}, + {0.43137254901960786, 0.43137254901960786, 0.43137254901960786}, + {0.43529411764705883, 0.43529411764705883, 0.43529411764705883}, + {0.4392156862745098, 0.4392156862745098, 0.4392156862745098}, + {0.4392156862745098, 0.4392156862745098, 0.4392156862745098}, + {0.44313725490196076, 
0.44313725490196076, 0.44313725490196076}, + {0.44705882352941173, 0.44705882352941173, 0.44705882352941173}, + {0.45098039215686275, 0.45098039215686275, 0.45098039215686275}, + {0.4549019607843137, 0.4549019607843137, 0.4549019607843137}, + {0.4549019607843137, 0.4549019607843137, 0.4549019607843137}, + {0.4588235294117647, 0.4588235294117647, 0.4588235294117647}, + {0.4627450980392157, 0.4627450980392157, 0.4627450980392157}, + {0.4666666666666667, 0.4666666666666667, 0.4666666666666667}, + {0.47058823529411764, 0.47058823529411764, 0.47058823529411764}, + {0.47058823529411764, 0.47058823529411764, 0.47058823529411764}, + {0.4745098039215686, 0.4745098039215686, 0.4745098039215686}, + {0.4784313725490196, 0.4784313725490196, 0.4784313725490196}, + {0.4823529411764706, 0.4823529411764706, 0.4823529411764706}, + {0.48627450980392156, 0.48627450980392156, 0.48627450980392156}, + {0.48627450980392156, 0.48627450980392156, 0.48627450980392156}, + {0.49019607843137253, 0.49019607843137253, 0.49019607843137253}, + {0.49411764705882355, 0.49411764705882355, 0.49411764705882355}, + {0.4980392156862745, 0.4980392156862745, 0.4980392156862745}, + {0.5019607843137255, 0.5019607843137255, 0.5019607843137255}, + {0.5019607843137255, 0.5019607843137255, 0.5019607843137255}, + {0.5058823529411764, 0.5058823529411764, 0.5058823529411764}, + {0.5098039215686274, 0.5098039215686274, 0.5098039215686274}, + {0.5137254901960784, 0.5137254901960784, 0.5137254901960784}, + {0.5176470588235293, 0.5176470588235293, 0.5176470588235293}, + {0.5176470588235293, 0.5176470588235293, 0.5176470588235293}, + {0.5215686274509804, 0.5215686274509804, 0.5215686274509804}, + {0.5254901960784314, 0.5254901960784314, 0.5254901960784314}, + {0.5294117647058824, 0.5294117647058824, 0.5294117647058824}, + {0.5333333333333333, 0.5333333333333333, 0.5333333333333333}, + {0.5333333333333333, 0.5333333333333333, 0.5333333333333333}, + {0.5372549019607843, 0.5372549019607843, 0.5372549019607843}, + {0.5411764705882353, 0.5411764705882353, 0.5411764705882353}, + {0.5450980392156862, 0.5450980392156862, 0.5450980392156862}, + {0.5490196078431373, 0.5490196078431373, 0.5490196078431373}, + {0.5490196078431373, 0.5490196078431373, 0.5490196078431373}, + {0.5529411764705883, 0.5529411764705883, 0.5529411764705883}, + {0.5568627450980392, 0.5568627450980392, 0.5568627450980392}, + {0.5607843137254902, 0.5607843137254902, 0.5607843137254902}, + {0.5647058823529412, 0.5647058823529412, 0.5647058823529412}, + {0.5647058823529412, 0.5647058823529412, 0.5647058823529412}, + {0.5686274509803921, 0.5686274509803921, 0.5686274509803921}, + {0.5725490196078431, 0.5725490196078431, 0.5725490196078431}, + {0.5764705882352941, 0.5764705882352941, 0.5764705882352941}, + {0.580392156862745, 0.580392156862745, 0.580392156862745}, + {0.580392156862745, 0.580392156862745, 0.580392156862745}, + {0.5843137254901961, 0.5843137254901961, 0.5843137254901961}, + {0.5882352941176471, 0.5882352941176471, 0.5882352941176471}, + {0.592156862745098, 0.592156862745098, 0.592156862745098}, + {0.596078431372549, 0.596078431372549, 0.596078431372549}, + {0.596078431372549, 0.596078431372549, 0.596078431372549}, + {0.6, 0.6, 0.6}, + {0.6039215686274509, 0.6039215686274509, 0.6039215686274509}, + {0.6078431372549019, 0.6078431372549019, 0.6078431372549019}, + {0.611764705882353, 0.611764705882353, 0.611764705882353}, + {0.611764705882353, 0.611764705882353, 0.611764705882353}, + {0.615686274509804, 0.615686274509804, 0.615686274509804}, + {0.6196078431372549, 
0.6196078431372549, 0.6196078431372549}, + {0.6235294117647059, 0.6235294117647059, 0.6235294117647059}, + {0.6274509803921569, 0.6274509803921569, 0.6274509803921569}, + {0.6274509803921569, 0.6274509803921569, 0.6274509803921569}, + {0.6313725490196078, 0.6313725490196078, 0.6313725490196078}, + {0.6352941176470588, 0.6352941176470588, 0.6352941176470588}, + {0.6392156862745098, 0.6392156862745098, 0.6392156862745098}, + {0.6431372549019607, 0.6431372549019607, 0.6431372549019607}, + {0.6431372549019607, 0.6431372549019607, 0.6431372549019607}, + {0.6470588235294118, 0.6470588235294118, 0.6470588235294118}, + {0.6509803921568628, 0.6509803921568628, 0.6509803921568628}, + {0.6549019607843137, 0.6549019607843137, 0.6549019607843137}, + {0.6588235294117647, 0.6588235294117647, 0.6588235294117647}, + {0.6588235294117647, 0.6588235294117647, 0.6588235294117647}, + {0.6627450980392157, 0.6627450980392157, 0.6627450980392157}, + {0.6666666666666666, 0.6666666666666666, 0.6666666666666666}, + {0.6705882352941176, 0.6705882352941176, 0.6705882352941176}, + {0.6745098039215687, 0.6745098039215687, 0.6745098039215687}, + {0.6745098039215687, 0.6745098039215687, 0.6745098039215687}, + {0.6784313725490196, 0.6784313725490196, 0.6784313725490196}, + {0.6823529411764706, 0.6823529411764706, 0.6823529411764706}, + {0.6862745098039216, 0.6862745098039216, 0.6862745098039216}, + {0.6901960784313725, 0.6901960784313725, 0.6901960784313725}, + {0.6901960784313725, 0.6901960784313725, 0.6901960784313725}, + {0.6941176470588235, 0.6941176470588235, 0.6941176470588235}, + {0.6980392156862745, 0.6980392156862745, 0.6980392156862745}, + {0.7019607843137254, 0.7019607843137254, 0.7019607843137254}, + {0.7058823529411764, 0.7058823529411764, 0.7058823529411764}, + {0.7058823529411764, 0.7058823529411764, 0.7058823529411764}, + {0.7098039215686275, 0.7098039215686275, 0.7098039215686275}, + {0.7137254901960784, 0.7137254901960784, 0.7137254901960784}, + {0.7176470588235294, 0.7176470588235294, 0.7176470588235294}, + {0.7215686274509804, 0.7215686274509804, 0.7215686274509804}, + {0.7215686274509804, 0.7215686274509804, 0.7215686274509804}, + {0.7254901960784313, 0.7254901960784313, 0.7254901960784313}, + {0.7294117647058823, 0.7294117647058823, 0.7294117647058823}, + {0.7333333333333333, 0.7333333333333333, 0.7333333333333333}, + {0.7372549019607844, 0.7372549019607844, 0.7372549019607844}, + {0.7372549019607844, 0.7372549019607844, 0.7372549019607844}, + {0.7411764705882353, 0.7411764705882353, 0.7411764705882353}, + {0.7450980392156863, 0.7450980392156863, 0.7450980392156863}, + {0.7490196078431373, 0.7490196078431373, 0.7490196078431373}, + {0.7529411764705882, 0.7529411764705882, 0.7529411764705882}, + {0.7529411764705882, 0.7529411764705882, 0.7529411764705882}, + {0.7568627450980392, 0.7568627450980392, 0.7568627450980392}, + {0.7607843137254902, 0.7607843137254902, 0.7607843137254902}, + {0.7647058823529411, 0.7647058823529411, 0.7647058823529411}, + {0.7686274509803921, 0.7686274509803921, 0.7686274509803921}, + {0.7686274509803921, 0.7686274509803921, 0.7686274509803921}, + {0.7725490196078432, 0.7725490196078432, 0.7725490196078432}, + {0.7764705882352941, 0.7764705882352941, 0.7764705882352941}, + {0.7803921568627451, 0.7803921568627451, 0.7803921568627451}, + {0.7843137254901961, 0.7843137254901961, 0.7843137254901961}, + {0.7843137254901961, 0.7843137254901961, 0.7843137254901961}, + {0.788235294117647, 0.788235294117647, 0.788235294117647}, + {0.792156862745098, 0.792156862745098, 0.792156862745098}, 
+ {0.796078431372549, 0.796078431372549, 0.796078431372549}, + {0.8, 0.8, 0.8}, + {0.8, 0.8, 0.8}, + {0.803921568627451, 0.803921568627451, 0.803921568627451}, + {0.807843137254902, 0.807843137254902, 0.807843137254902}, + {0.8117647058823529, 0.8117647058823529, 0.8117647058823529}, + {0.8156862745098039, 0.8156862745098039, 0.8156862745098039}, + {0.8156862745098039, 0.8156862745098039, 0.8156862745098039}, + {0.8196078431372549, 0.8196078431372549, 0.8196078431372549}, + {0.8235294117647058, 0.8235294117647058, 0.8235294117647058}, + {0.8274509803921568, 0.8274509803921568, 0.8274509803921568}, + {0.8313725490196078, 0.8313725490196078, 0.8313725490196078}, + {0.8313725490196078, 0.8313725490196078, 0.8313725490196078}, + {0.8352941176470589, 0.8352941176470589, 0.8352941176470589}, + {0.8392156862745098, 0.8392156862745098, 0.8392156862745098}, + {0.8431372549019608, 0.8431372549019608, 0.8431372549019608}, + {0.8470588235294118, 0.8470588235294118, 0.8470588235294118}, + {0.8470588235294118, 0.8470588235294118, 0.8470588235294118}, + {0.8509803921568627, 0.8509803921568627, 0.8509803921568627}, + {0.8549019607843137, 0.8549019607843137, 0.8549019607843137}, + {0.8588235294117647, 0.8588235294117647, 0.8588235294117647}, + {0.8627450980392157, 0.8627450980392157, 0.8627450980392157}, + {0.8627450980392157, 0.8627450980392157, 0.8627450980392157}, + {0.8666666666666667, 0.8666666666666667, 0.8666666666666667}, + {0.8705882352941177, 0.8705882352941177, 0.8705882352941177}, + {0.8745098039215686, 0.8745098039215686, 0.8745098039215686}, + {0.8784313725490196, 0.8784313725490196, 0.8784313725490196}, + {0.8784313725490196, 0.8784313725490196, 0.8784313725490196}, + {0.8823529411764706, 0.8823529411764706, 0.8823529411764706}, + {0.8862745098039215, 0.8862745098039215, 0.8862745098039215}, + {0.8901960784313725, 0.8901960784313725, 0.8901960784313725}, + {0.8941176470588235, 0.8941176470588235, 0.8941176470588235}, + {0.8941176470588235, 0.8941176470588235, 0.8941176470588235}, + {0.8980392156862745, 0.8980392156862745, 0.8980392156862745}, + {0.9019607843137255, 0.9019607843137255, 0.9019607843137255}, + {0.9058823529411765, 0.9058823529411765, 0.9058823529411765}, + {0.9098039215686274, 0.9098039215686274, 0.9098039215686274}, + {0.9098039215686274, 0.9098039215686274, 0.9098039215686274}, + {0.9137254901960784, 0.9137254901960784, 0.9137254901960784}, + {0.9176470588235294, 0.9176470588235294, 0.9176470588235294}, + {0.9215686274509803, 0.9215686274509803, 0.9215686274509803}, + {0.9254901960784314, 0.9254901960784314, 0.9254901960784314}, + {0.9254901960784314, 0.9254901960784314, 0.9254901960784314}, + {0.9294117647058824, 0.9294117647058824, 0.9294117647058824}, + {0.9333333333333333, 0.9333333333333333, 0.9333333333333333}, + {0.9372549019607843, 0.9372549019607843, 0.9372549019607843}, + {0.9411764705882353, 0.9411764705882353, 0.9411764705882353}, + {0.9411764705882353, 0.9411764705882353, 0.9411764705882353}, + {0.9450980392156862, 0.9450980392156862, 0.9450980392156862}, + {0.9490196078431372, 0.9490196078431372, 0.9490196078431372}, + {0.9529411764705882, 0.9529411764705882, 0.9529411764705882}, + {0.9568627450980391, 0.9568627450980391, 0.9568627450980391}, + {0.9568627450980391, 0.9568627450980391, 0.9568627450980391}, + {0.9607843137254902, 0.9607843137254902, 0.9607843137254902}, + {0.9647058823529412, 0.9647058823529412, 0.9647058823529412}, + {0.9686274509803922, 0.9686274509803922, 0.9686274509803922}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + 
{0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9764705882352941, 0.9764705882352941, 0.9764705882352941}, + {0.9803921568627451, 0.9803921568627451, 0.9803921568627451}, + {0.984313725490196, 0.984313725490196, 0.984313725490196}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {1.0, 1.0, 1.0}, +}; + +const size_t grey_cal_ref_n = 256; +const float grey_cal_ref_palette[grey_cal_ref_n][3] = { + {0.2, 0.2, 0.2}, + {0.21176470588235294, 0.21176470588235294, 0.21176470588235294}, + {0.22745098039215686, 0.22745098039215686, 0.22745098039215686}, + {0.24313725490196078, 0.24313725490196078, 0.24313725490196078}, + {0.2588235294117647, 0.2588235294117647, 0.2588235294117647}, + {0.27450980392156865, 0.27450980392156865, 0.27450980392156865}, + {0.28627450980392155, 0.28627450980392155, 0.28627450980392155}, + {0.30196078431372547, 0.30196078431372547, 0.30196078431372547}, + {0.3137254901960784, 0.3137254901960784, 0.3137254901960784}, + {0.32941176470588235, 0.32941176470588235, 0.32941176470588235}, + {0.34509803921568627, 0.34509803921568627, 0.34509803921568627}, + {0.3568627450980392, 0.3568627450980392, 0.3568627450980392}, + {0.3686274509803922, 0.3686274509803922, 0.3686274509803922}, + {0.3803921568627451, 0.3803921568627451, 0.3803921568627451}, + {0.39215686274509803, 0.39215686274509803, 0.39215686274509803}, + {0.40784313725490196, 0.40784313725490196, 0.40784313725490196}, + {0.4196078431372549, 0.4196078431372549, 0.4196078431372549}, + {0.43137254901960786, 0.43137254901960786, 0.43137254901960786}, + {0.44313725490196076, 0.44313725490196076, 0.44313725490196076}, + {0.4549019607843137, 0.4549019607843137, 0.4549019607843137}, + {0.4666666666666667, 0.4666666666666667, 0.4666666666666667}, + {0.4784313725490196, 0.4784313725490196, 0.4784313725490196}, + {0.48627450980392156, 0.48627450980392156, 0.48627450980392156}, + {0.5019607843137255, 0.5019607843137255, 0.5019607843137255}, + {0.5098039215686274, 0.5098039215686274, 0.5098039215686274}, + {0.5215686274509804, 0.5215686274509804, 0.5215686274509804}, + {0.5333333333333333, 0.5333333333333333, 0.5333333333333333}, + {0.5411764705882353, 0.5411764705882353, 0.5411764705882353}, + {0.5490196078431373, 0.5490196078431373, 0.5490196078431373}, + {0.5647058823529412, 0.5647058823529412, 0.5647058823529412}, + {0.5725490196078431, 0.5725490196078431, 0.5725490196078431}, + {0.580392156862745, 0.580392156862745, 0.580392156862745}, + {0.592156862745098, 0.592156862745098, 0.592156862745098}, + {0.596078431372549, 0.596078431372549, 0.596078431372549}, + {0.6078431372549019, 0.6078431372549019, 0.6078431372549019}, + {0.615686274509804, 0.615686274509804, 0.615686274509804}, + {0.6274509803921569, 0.6274509803921569, 0.6274509803921569}, + {0.6352941176470588, 0.6352941176470588, 0.6352941176470588}, + {0.6431372549019607, 0.6431372549019607, 0.6431372549019607}, + {0.6509803921568628, 0.6509803921568628, 0.6509803921568628}, + {0.6588235294117647, 0.6588235294117647, 0.6588235294117647}, + {0.6666666666666666, 0.6666666666666666, 0.6666666666666666}, + {0.6745098039215687, 0.6745098039215687, 0.6745098039215687}, + {0.6823529411764706, 0.6823529411764706, 0.6823529411764706}, + {0.6901960784313725, 0.6901960784313725, 0.6901960784313725}, + {0.6980392156862745, 0.6980392156862745, 0.6980392156862745}, + 
{0.7058823529411764, 0.7058823529411764, 0.7058823529411764}, + {0.7098039215686275, 0.7098039215686275, 0.7098039215686275}, + {0.7176470588235294, 0.7176470588235294, 0.7176470588235294}, + {0.7215686274509804, 0.7215686274509804, 0.7215686274509804}, + {0.7333333333333333, 0.7333333333333333, 0.7333333333333333}, + {0.7372549019607844, 0.7372549019607844, 0.7372549019607844}, + {0.7450980392156863, 0.7450980392156863, 0.7450980392156863}, + {0.7529411764705882, 0.7529411764705882, 0.7529411764705882}, + {0.7568627450980392, 0.7568627450980392, 0.7568627450980392}, + {0.7647058823529411, 0.7647058823529411, 0.7647058823529411}, + {0.7686274509803921, 0.7686274509803921, 0.7686274509803921}, + {0.7725490196078432, 0.7725490196078432, 0.7725490196078432}, + {0.7803921568627451, 0.7803921568627451, 0.7803921568627451}, + {0.7843137254901961, 0.7843137254901961, 0.7843137254901961}, + {0.792156862745098, 0.792156862745098, 0.792156862745098}, + {0.796078431372549, 0.796078431372549, 0.796078431372549}, + {0.8, 0.8, 0.8}, + {0.807843137254902, 0.807843137254902, 0.807843137254902}, + {0.8117647058823529, 0.8117647058823529, 0.8117647058823529}, + {0.8156862745098039, 0.8156862745098039, 0.8156862745098039}, + {0.8196078431372549, 0.8196078431372549, 0.8196078431372549}, + {0.8274509803921568, 0.8274509803921568, 0.8274509803921568}, + {0.8313725490196078, 0.8313725490196078, 0.8313725490196078}, + {0.8352941176470589, 0.8352941176470589, 0.8352941176470589}, + {0.8392156862745098, 0.8392156862745098, 0.8392156862745098}, + {0.8431372549019608, 0.8431372549019608, 0.8431372549019608}, + {0.8470588235294118, 0.8470588235294118, 0.8470588235294118}, + {0.8509803921568627, 0.8509803921568627, 0.8509803921568627}, + {0.8549019607843137, 0.8549019607843137, 0.8549019607843137}, + {0.8627450980392157, 0.8627450980392157, 0.8627450980392157}, + {0.8627450980392157, 0.8627450980392157, 0.8627450980392157}, + {0.8666666666666667, 0.8666666666666667, 0.8666666666666667}, + {0.8705882352941177, 0.8705882352941177, 0.8705882352941177}, + {0.8745098039215686, 0.8745098039215686, 0.8745098039215686}, + {0.8784313725490196, 0.8784313725490196, 0.8784313725490196}, + {0.8823529411764706, 0.8823529411764706, 0.8823529411764706}, + {0.8862745098039215, 0.8862745098039215, 0.8862745098039215}, + {0.8901960784313725, 0.8901960784313725, 0.8901960784313725}, + {0.8941176470588235, 0.8941176470588235, 0.8941176470588235}, + {0.8941176470588235, 0.8941176470588235, 0.8941176470588235}, + {0.8980392156862745, 0.8980392156862745, 0.8980392156862745}, + {0.9019607843137255, 0.9019607843137255, 0.9019607843137255}, + {0.9058823529411765, 0.9058823529411765, 0.9058823529411765}, + {0.9098039215686274, 0.9098039215686274, 0.9098039215686274}, + {0.9098039215686274, 0.9098039215686274, 0.9098039215686274}, + {0.9098039215686274, 0.9098039215686274, 0.9098039215686274}, + {0.9137254901960784, 0.9137254901960784, 0.9137254901960784}, + {0.9176470588235294, 0.9176470588235294, 0.9176470588235294}, + {0.9215686274509803, 0.9215686274509803, 0.9215686274509803}, + {0.9254901960784314, 0.9254901960784314, 0.9254901960784314}, + {0.9254901960784314, 0.9254901960784314, 0.9254901960784314}, + {0.9254901960784314, 0.9254901960784314, 0.9254901960784314}, + {0.9294117647058824, 0.9294117647058824, 0.9294117647058824}, + {0.9333333333333333, 0.9333333333333333, 0.9333333333333333}, + {0.9333333333333333, 0.9333333333333333, 0.9333333333333333}, + {0.9372549019607843, 0.9372549019607843, 0.9372549019607843}, + {0.9411764705882353, 
0.9411764705882353, 0.9411764705882353}, + {0.9411764705882353, 0.9411764705882353, 0.9411764705882353}, + {0.9411764705882353, 0.9411764705882353, 0.9411764705882353}, + {0.9411764705882353, 0.9411764705882353, 0.9411764705882353}, + {0.9450980392156862, 0.9450980392156862, 0.9450980392156862}, + {0.9490196078431372, 0.9490196078431372, 0.9490196078431372}, + {0.9490196078431372, 0.9490196078431372, 0.9490196078431372}, + {0.9529411764705882, 0.9529411764705882, 0.9529411764705882}, + {0.9529411764705882, 0.9529411764705882, 0.9529411764705882}, + {0.9568627450980391, 0.9568627450980391, 0.9568627450980391}, + {0.9568627450980391, 0.9568627450980391, 0.9568627450980391}, + {0.9568627450980391, 0.9568627450980391, 0.9568627450980391}, + {0.9568627450980391, 0.9568627450980391, 0.9568627450980391}, + {0.9607843137254902, 0.9607843137254902, 0.9607843137254902}, + {0.9607843137254902, 0.9607843137254902, 0.9607843137254902}, + {0.9647058823529412, 0.9647058823529412, 0.9647058823529412}, + {0.9647058823529412, 0.9647058823529412, 0.9647058823529412}, + {0.9647058823529412, 0.9647058823529412, 0.9647058823529412}, + {0.9686274509803922, 0.9686274509803922, 0.9686274509803922}, + {0.9686274509803922, 0.9686274509803922, 0.9686274509803922}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9725490196078431, 0.9725490196078431, 0.9725490196078431}, + {0.9764705882352941, 0.9764705882352941, 0.9764705882352941}, + {0.9764705882352941, 0.9764705882352941, 0.9764705882352941}, + {0.9764705882352941, 0.9764705882352941, 0.9764705882352941}, + {0.9803921568627451, 0.9803921568627451, 0.9803921568627451}, + {0.9803921568627451, 0.9803921568627451, 0.9803921568627451}, + {0.9803921568627451, 0.9803921568627451, 0.9803921568627451}, + {0.9803921568627451, 0.9803921568627451, 0.9803921568627451}, + {0.984313725490196, 0.984313725490196, 0.984313725490196}, + {0.984313725490196, 0.984313725490196, 0.984313725490196}, + {0.984313725490196, 0.984313725490196, 0.984313725490196}, + {0.984313725490196, 0.984313725490196, 0.984313725490196}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9882352941176471, 0.9882352941176471, 0.9882352941176471}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + 
{0.9921568627450981, 0.9921568627450981, 0.9921568627450981}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {0.996078431372549, 0.996078431372549, 0.996078431372549}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, + {1.0, 1.0, 1.0}, +}; + + // original colormaps licensed CC0f, public domain, no attribution needed: // https://github.com/BIDS/colormap/blob/master/LICENSE.txt @@ -1060,137 +1586,787 @@ const float viridis_palette[viridis_n][3] = { {0.97441665f, 0.90358991f, 0.13021494f}, {0.98386829f, 0.90486726f, 0.13689671f}, {0.99324789f, 0.90615657f, 0.1439362f}}; + +const size_t viridis_cal_ref_n = 256; +const float viridis_cal_ref_palette[viridis_cal_ref_n][3] = { + {0.26700401, 0.00487433, 0.32941519}, + {0.27259384, 0.02556309, 0.35309303}, + {0.27794143, 0.05632444, 0.38119074}, + {0.28144581, 0.0843197, 0.40741404}, + {0.28309095, 0.11055307, 0.43155375}, + {0.28288389, 0.13592005, 0.45342734}, + {0.28141228, 0.15583425, 0.46920128}, + {0.27801236, 0.18036684, 0.48669702}, + {0.27412802, 0.19972086, 0.49891131}, + {0.26796846, 0.22354911, 0.5120084}, + {0.26213801, 0.24228619, 0.52083736}, + {0.25564519, 0.26070284, 0.52831152}, + {0.24862899, 0.27877509, 0.53455561}, + {0.24123708, 0.29648471, 0.53970946}, + {0.23360277, 0.31382773, 0.54391424}, + {0.2258633, 0.33080515, 0.54731353}, + {0.21812995, 0.34743154, 0.55003755}, + {0.2105031, 0.36372671, 
0.55220646}, + {0.20306309, 0.37971644, 0.55392505}, + {0.19585993, 0.39543297, 0.55527637}, + {0.19063135, 0.40706148, 0.55608907}, + {0.18389763, 0.42238275, 0.55694377}, + {0.17901879, 0.43375572, 0.55743035}, + {0.17271876, 0.4487906, 0.55788532}, + {0.16812641, 0.45998802, 0.55808199}, + {0.16214155, 0.47483821, 0.55813967}, + {0.15772933, 0.48593197, 0.55801347}, + {0.15336445, 0.49700003, 0.55772371}, + {0.14903918, 0.50805136, 0.5572505}, + {0.14334327, 0.52277292, 0.55629491}, + {0.13914708, 0.53381201, 0.55529773}, + {0.13506561, 0.54485335, 0.55402906}, + {0.13117249, 0.55589872, 0.55245948}, + {0.12872938, 0.56326503, 0.55122927}, + {0.12539383, 0.57431754, 0.54908564}, + {0.12260562, 0.58537105, 0.54655722}, + {0.12056501, 0.59642187, 0.54361058}, + {0.11951163, 0.60746388, 0.54021751}, + {0.11948255, 0.61481702, 0.53769219}, + {0.12063824, 0.62582833, 0.53348834}, + {0.12231244, 0.63315277, 0.53039808}, + {0.12632581, 0.64410744, 0.52531069}, + {0.13006688, 0.65138436, 0.52160791}, + {0.13733921, 0.66225157, 0.51557101}, + {0.14330291, 0.66945881, 0.51121549}, + {0.15389405, 0.68020343, 0.50417217}, + {0.16201598, 0.68731632, 0.49912906}, + {0.1709484, 0.69438405, 0.49380294}, + {0.18065314, 0.70140222, 0.48818938}, + {0.19109018, 0.70836635, 0.48228395}, + {0.20803045, 0.71870095, 0.4728733}, + {0.22012381, 0.72550945, 0.46622638}, + {0.23281498, 0.73224735, 0.45927675}, + {0.24606968, 0.73890972, 0.45202405}, + {0.25985676, 0.74549162, 0.44446673}, + {0.27414922, 0.75198807, 0.4366009}, + {0.28147681, 0.75520266, 0.43255207}, + {0.29647899, 0.76156142, 0.42422341}, + {0.31192534, 0.76782207, 0.41558638}, + {0.3277958, 0.77397953, 0.40664011}, + {0.34407411, 0.78002855, 0.39738103}, + {0.35235985, 0.78301086, 0.39263579}, + {0.3692142, 0.78888793, 0.38291438}, + {0.38643282, 0.79464415, 0.37288606}, + {0.39517408, 0.79747541, 0.36775726}, + {0.4129135, 0.80304099, 0.35726893}, + {0.42190813, 0.80577412, 0.35191009}, + {0.44013691, 0.81113836, 0.3409673}, + {0.44936763, 0.81376835, 0.33538426}, + {0.46805314, 0.81892143, 0.32399761}, + {0.47750446, 0.82144351, 0.31819529}, + {0.4870258, 0.82392862, 0.31232133}, + {0.5062713, 0.82878621, 0.30036211}, + {0.51599182, 0.83115784, 0.29427888}, + {0.52577622, 0.83349064, 0.2881265}, + {0.5455244, 0.83803918, 0.27562602}, + {0.55548397, 0.84025437, 0.26928147}, + {0.5654976, 0.8424299, 0.26287683}, + {0.57556297, 0.84456561, 0.25641457}, + {0.58567772, 0.84666139, 0.24989748}, + {0.60604528, 0.8507331, 0.23671214}, + {0.61629283, 0.85270912, 0.23005179}, + {0.62657923, 0.85464543, 0.22335258}, + {0.63690157, 0.85654226, 0.21662012}, + {0.64725685, 0.85839991, 0.20986086}, + {0.65764197, 0.86021878, 0.20308229}, + {0.66805369, 0.86199932, 0.19629307}, + {0.67848868, 0.86374211, 0.18950326}, + {0.68894351, 0.86544779, 0.18272455}, + {0.69941463, 0.86711711, 0.17597055}, + {0.69941463, 0.86711711, 0.17597055}, + {0.70989842, 0.86875092, 0.16925712}, + {0.72039115, 0.87035015, 0.16260273}, + {0.73088902, 0.87191584, 0.15602894}, + {0.74138803, 0.87344918, 0.14956101}, + {0.75188414, 0.87495143, 0.14322828}, + {0.75188414, 0.87495143, 0.14322828}, + {0.76237342, 0.87642392, 0.13706449}, + {0.77285183, 0.87786808, 0.13110864}, + {0.78331535, 0.87928545, 0.12540538}, + {0.78331535, 0.87928545, 0.12540538}, + {0.79375994, 0.88067763, 0.12000532}, + {0.80418159, 0.88204632, 0.11496505}, + {0.80418159, 0.88204632, 0.11496505}, + {0.81457634, 0.88339329, 0.11034678}, + {0.81457634, 0.88339329, 0.11034678}, + {0.82494028, 0.88472036, 
0.10621724}, + {0.83526959, 0.88602943, 0.1026459}, + {0.83526959, 0.88602943, 0.1026459}, + {0.84556056, 0.88732243, 0.09970219}, + {0.84556056, 0.88732243, 0.09970219}, + {0.8558096, 0.88860134, 0.09745186}, + {0.8558096, 0.88860134, 0.09745186}, + {0.86601325, 0.88986815, 0.09595277}, + {0.86601325, 0.88986815, 0.09595277}, + {0.87616824, 0.89112487, 0.09525046}, + {0.87616824, 0.89112487, 0.09525046}, + {0.88627146, 0.89237353, 0.09537439}, + {0.88627146, 0.89237353, 0.09537439}, + {0.88627146, 0.89237353, 0.09537439}, + {0.89632002, 0.89361614, 0.09633538}, + {0.89632002, 0.89361614, 0.09633538}, + {0.90631121, 0.89485467, 0.09812496}, + {0.90631121, 0.89485467, 0.09812496}, + {0.90631121, 0.89485467, 0.09812496}, + {0.91624212, 0.89609127, 0.1007168}, + {0.91624212, 0.89609127, 0.1007168}, + {0.91624212, 0.89609127, 0.1007168}, + {0.92610579, 0.89732977, 0.10407067}, + {0.92610579, 0.89732977, 0.10407067}, + {0.92610579, 0.89732977, 0.10407067}, + {0.93590444, 0.8985704, 0.10813094}, + {0.93590444, 0.8985704, 0.10813094}, + {0.93590444, 0.8985704, 0.10813094}, + {0.93590444, 0.8985704, 0.10813094}, + {0.94563626, 0.899815, 0.11283773}, + {0.94563626, 0.899815, 0.11283773}, + {0.94563626, 0.899815, 0.11283773}, + {0.94563626, 0.899815, 0.11283773}, + {0.95529972, 0.90106534, 0.11812832}, + {0.95529972, 0.90106534, 0.11812832}, + {0.95529972, 0.90106534, 0.11812832}, + {0.95529972, 0.90106534, 0.11812832}, + {0.95529972, 0.90106534, 0.11812832}, + {0.95529972, 0.90106534, 0.11812832}, + {0.96489353, 0.90232311, 0.12394051}, + {0.96489353, 0.90232311, 0.12394051}, + {0.96489353, 0.90232311, 0.12394051}, + {0.96489353, 0.90232311, 0.12394051}, + {0.96489353, 0.90232311, 0.12394051}, + {0.96489353, 0.90232311, 0.12394051}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.97441665, 0.90358991, 0.13021494}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.98386829, 0.90486726, 0.13689671}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 
0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, + {0.99324789, 0.90615657, 0.1439362}, +}; + const size_t magma_n = 256; const float magma_palette[magma_n][3] = { - {0.001462f, 0.000466f, 0.013866f}, {0.002258f, 0.001295f, 0.018331f}, - {0.003279f, 0.002305f, 0.023708f}, {0.004512f, 0.00349f, 0.029965f}, - {0.00595f, 0.004843f, 0.03713f}, {0.007588f, 0.006356f, 0.044973f}, - {0.009426f, 0.008022f, 0.052844f}, {0.011465f, 0.009828f, 0.06075f}, - {0.013708f, 0.011771f, 0.068667f}, {0.016156f, 0.01384f, 0.076603f}, - {0.018815f, 0.016026f, 0.084584f}, {0.021692f, 0.01832f, 0.09261f}, - {0.024792f, 0.020715f, 0.100676f}, {0.028123f, 0.023201f, 0.108787f}, - {0.031696f, 0.025765f, 0.116965f}, {0.03552f, 0.028397f, 0.125209f}, - {0.039608f, 0.03109f, 0.133515f}, {0.04383f, 0.03383f, 0.141886f}, - {0.048062f, 0.036607f, 0.150327f}, {0.05232f, 0.039407f, 0.158841f}, - {0.056615f, 0.04216f, 0.167446f}, {0.060949f, 0.044794f, 0.176129f}, - {0.06533f, 0.047318f, 0.184892f}, {0.069764f, 0.049726f, 0.193735f}, - {0.074257f, 0.052017f, 0.20266f}, {0.078815f, 0.054184f, 0.211667f}, - {0.083446f, 0.056225f, 0.220755f}, {0.088155f, 0.058133f, 0.229922f}, - {0.092949f, 0.059904f, 0.239164f}, {0.097833f, 0.061531f, 0.248477f}, - {0.102815f, 0.06301f, 0.257854f}, {0.107899f, 0.064335f, 0.267289f}, - {0.113094f, 
0.065492f, 0.276784f}, {0.118405f, 0.066479f, 0.286321f}, - {0.123833f, 0.067295f, 0.295879f}, {0.12938f, 0.067935f, 0.305443f}, - {0.135053f, 0.068391f, 0.315f}, {0.140858f, 0.068654f, 0.324538f}, - {0.146785f, 0.068738f, 0.334011f}, {0.152839f, 0.068637f, 0.343404f}, - {0.159018f, 0.068354f, 0.352688f}, {0.165308f, 0.067911f, 0.361816f}, - {0.171713f, 0.067305f, 0.370771f}, {0.178212f, 0.066576f, 0.379497f}, - {0.184801f, 0.065732f, 0.387973f}, {0.19146f, 0.064818f, 0.396152f}, - {0.198177f, 0.063862f, 0.404009f}, {0.204935f, 0.062907f, 0.411514f}, - {0.211718f, 0.061992f, 0.418647f}, {0.218512f, 0.061158f, 0.425392f}, - {0.225302f, 0.060445f, 0.431742f}, {0.232077f, 0.059889f, 0.437695f}, - {0.238826f, 0.059517f, 0.443256f}, {0.245543f, 0.059352f, 0.448436f}, - {0.25222f, 0.059415f, 0.453248f}, {0.258857f, 0.059706f, 0.45771f}, - {0.265447f, 0.060237f, 0.46184f}, {0.271994f, 0.060994f, 0.46566f}, - {0.278493f, 0.061978f, 0.46919f}, {0.284951f, 0.063168f, 0.472451f}, - {0.291366f, 0.064553f, 0.475462f}, {0.29774f, 0.066117f, 0.478243f}, - {0.304081f, 0.067835f, 0.480812f}, {0.310382f, 0.069702f, 0.483186f}, - {0.316654f, 0.07169f, 0.48538f}, {0.322899f, 0.073782f, 0.487408f}, - {0.329114f, 0.075972f, 0.489287f}, {0.335308f, 0.078236f, 0.491024f}, - {0.341482f, 0.080564f, 0.492631f}, {0.347636f, 0.082946f, 0.494121f}, - {0.353773f, 0.085373f, 0.495501f}, {0.359898f, 0.087831f, 0.496778f}, - {0.366012f, 0.090314f, 0.49796f}, {0.372116f, 0.092816f, 0.499053f}, - {0.378211f, 0.095332f, 0.500067f}, {0.384299f, 0.097855f, 0.501002f}, - {0.390384f, 0.100379f, 0.501864f}, {0.396467f, 0.102902f, 0.502658f}, - {0.402548f, 0.10542f, 0.503386f}, {0.408629f, 0.10793f, 0.504052f}, - {0.414709f, 0.110431f, 0.504662f}, {0.420791f, 0.11292f, 0.505215f}, - {0.426877f, 0.115395f, 0.505714f}, {0.432967f, 0.117855f, 0.50616f}, - {0.439062f, 0.120298f, 0.506555f}, {0.445163f, 0.122724f, 0.506901f}, - {0.451271f, 0.125132f, 0.507198f}, {0.457386f, 0.127522f, 0.507448f}, - {0.463508f, 0.129893f, 0.507652f}, {0.46964f, 0.132245f, 0.507809f}, - {0.47578f, 0.134577f, 0.507921f}, {0.481929f, 0.136891f, 0.507989f}, - {0.488088f, 0.139186f, 0.508011f}, {0.494258f, 0.141462f, 0.507988f}, - {0.500438f, 0.143719f, 0.50792f}, {0.506629f, 0.145958f, 0.507806f}, - {0.512831f, 0.148179f, 0.507648f}, {0.519045f, 0.150383f, 0.507443f}, - {0.52527f, 0.152569f, 0.507192f}, {0.531507f, 0.154739f, 0.506895f}, - {0.537755f, 0.156894f, 0.506551f}, {0.544015f, 0.159033f, 0.506159f}, - {0.550287f, 0.161158f, 0.505719f}, {0.556571f, 0.163269f, 0.50523f}, - {0.562866f, 0.165368f, 0.504692f}, {0.569172f, 0.167454f, 0.504105f}, - {0.57549f, 0.16953f, 0.503466f}, {0.581819f, 0.171596f, 0.502777f}, - {0.588158f, 0.173652f, 0.502035f}, {0.594508f, 0.175701f, 0.501241f}, - {0.600868f, 0.177743f, 0.500394f}, {0.607238f, 0.179779f, 0.499492f}, - {0.613617f, 0.181811f, 0.498536f}, {0.620005f, 0.18384f, 0.497524f}, - {0.626401f, 0.185867f, 0.496456f}, {0.632805f, 0.187893f, 0.495332f}, - {0.639216f, 0.189921f, 0.49415f}, {0.645633f, 0.191952f, 0.49291f}, - {0.652056f, 0.193986f, 0.491611f}, {0.658483f, 0.196027f, 0.490253f}, - {0.664915f, 0.198075f, 0.488836f}, {0.671349f, 0.200133f, 0.487358f}, - {0.677786f, 0.202203f, 0.485819f}, {0.684224f, 0.204286f, 0.484219f}, - {0.690661f, 0.206384f, 0.482558f}, {0.697098f, 0.208501f, 0.480835f}, - {0.703532f, 0.210638f, 0.479049f}, {0.709962f, 0.212797f, 0.477201f}, - {0.716387f, 0.214982f, 0.47529f}, {0.722805f, 0.217194f, 0.473316f}, - {0.729216f, 0.219437f, 0.471279f}, {0.735616f, 0.221713f, 
0.46918f}, - {0.742004f, 0.224025f, 0.467018f}, {0.748378f, 0.226377f, 0.464794f}, - {0.754737f, 0.228772f, 0.462509f}, {0.761077f, 0.231214f, 0.460162f}, - {0.767398f, 0.233705f, 0.457755f}, {0.773695f, 0.236249f, 0.455289f}, - {0.779968f, 0.238851f, 0.452765f}, {0.786212f, 0.241514f, 0.450184f}, - {0.792427f, 0.244242f, 0.447543f}, {0.798608f, 0.24704f, 0.444848f}, - {0.804752f, 0.249911f, 0.442102f}, {0.810855f, 0.252861f, 0.439305f}, - {0.816914f, 0.255895f, 0.436461f}, {0.822926f, 0.259016f, 0.433573f}, - {0.828886f, 0.262229f, 0.430644f}, {0.834791f, 0.26554f, 0.427671f}, - {0.840636f, 0.268953f, 0.424666f}, {0.846416f, 0.272473f, 0.421631f}, - {0.852126f, 0.276106f, 0.418573f}, {0.857763f, 0.279857f, 0.415496f}, - {0.86332f, 0.283729f, 0.412403f}, {0.868793f, 0.287728f, 0.409303f}, - {0.874176f, 0.291859f, 0.406205f}, {0.879464f, 0.296125f, 0.403118f}, - {0.884651f, 0.30053f, 0.400047f}, {0.889731f, 0.305079f, 0.397002f}, - {0.8947f, 0.309773f, 0.393995f}, {0.899552f, 0.314616f, 0.391037f}, - {0.904281f, 0.31961f, 0.388137f}, {0.908884f, 0.324755f, 0.385308f}, - {0.913354f, 0.330052f, 0.382563f}, {0.917689f, 0.3355f, 0.379915f}, - {0.921884f, 0.341098f, 0.377376f}, {0.925937f, 0.346844f, 0.374959f}, - {0.929845f, 0.352734f, 0.372677f}, {0.933606f, 0.358764f, 0.370541f}, - {0.937221f, 0.364929f, 0.368567f}, {0.940687f, 0.371224f, 0.366762f}, - {0.944006f, 0.377643f, 0.365136f}, {0.94718f, 0.384178f, 0.363701f}, - {0.95021f, 0.39082f, 0.362468f}, {0.953099f, 0.397563f, 0.361438f}, - {0.955849f, 0.4044f, 0.360619f}, {0.958464f, 0.411324f, 0.360014f}, - {0.960949f, 0.418323f, 0.35963f}, {0.96331f, 0.42539f, 0.359469f}, - {0.965549f, 0.432519f, 0.359529f}, {0.967671f, 0.439703f, 0.35981f}, - {0.96968f, 0.446936f, 0.360311f}, {0.971582f, 0.45421f, 0.36103f}, - {0.973381f, 0.46152f, 0.361965f}, {0.975082f, 0.468861f, 0.363111f}, - {0.97669f, 0.476226f, 0.364466f}, {0.97821f, 0.483612f, 0.366025f}, - {0.979645f, 0.491014f, 0.367783f}, {0.981f, 0.498428f, 0.369734f}, - {0.982279f, 0.505851f, 0.371874f}, {0.983485f, 0.51328f, 0.374198f}, - {0.984622f, 0.520713f, 0.376698f}, {0.985693f, 0.528148f, 0.379371f}, - {0.9867f, 0.535582f, 0.38221f}, {0.987646f, 0.543015f, 0.38521f}, - {0.988533f, 0.550446f, 0.388365f}, {0.989363f, 0.557873f, 0.391671f}, - {0.990138f, 0.565296f, 0.395122f}, {0.990871f, 0.572706f, 0.398714f}, - {0.991558f, 0.580107f, 0.402441f}, {0.992196f, 0.587502f, 0.406299f}, - {0.992785f, 0.594891f, 0.410283f}, {0.993326f, 0.602275f, 0.41439f}, - {0.993834f, 0.609644f, 0.418613f}, {0.994309f, 0.616999f, 0.42295f}, - {0.994738f, 0.62435f, 0.427397f}, {0.995122f, 0.631696f, 0.431951f}, - {0.99548f, 0.639027f, 0.436607f}, {0.99581f, 0.646344f, 0.441361f}, - {0.996096f, 0.653659f, 0.446213f}, {0.996341f, 0.660969f, 0.45116f}, - {0.99658f, 0.668256f, 0.456192f}, {0.996775f, 0.675541f, 0.461314f}, - {0.996925f, 0.682828f, 0.466526f}, {0.997077f, 0.690088f, 0.471811f}, - {0.997186f, 0.697349f, 0.477182f}, {0.997254f, 0.704611f, 0.482635f}, - {0.997325f, 0.711848f, 0.488154f}, {0.997351f, 0.719089f, 0.493755f}, - {0.997351f, 0.726324f, 0.499428f}, {0.997341f, 0.733545f, 0.505167f}, - {0.997285f, 0.740772f, 0.510983f}, {0.997228f, 0.747981f, 0.516859f}, - {0.997138f, 0.75519f, 0.522806f}, {0.997019f, 0.762398f, 0.528821f}, - {0.996898f, 0.769591f, 0.534892f}, {0.996727f, 0.776795f, 0.541039f}, - {0.996571f, 0.783977f, 0.547233f}, {0.996369f, 0.791167f, 0.553499f}, - {0.996162f, 0.798348f, 0.55982f}, {0.995932f, 0.805527f, 0.566202f}, - {0.99568f, 0.812706f, 0.572645f}, {0.995424f, 
0.819875f, 0.57914f}, - {0.995131f, 0.827052f, 0.585701f}, {0.994851f, 0.834213f, 0.592307f}, - {0.994524f, 0.841387f, 0.598983f}, {0.994222f, 0.84854f, 0.605696f}, - {0.993866f, 0.855711f, 0.612482f}, {0.993545f, 0.862859f, 0.619299f}, - {0.99317f, 0.870024f, 0.626189f}, {0.992831f, 0.877168f, 0.633109f}, - {0.99244f, 0.88433f, 0.640099f}, {0.992089f, 0.89147f, 0.647116f}, - {0.991688f, 0.898627f, 0.654202f}, {0.991332f, 0.905763f, 0.661309f}, - {0.99093f, 0.912915f, 0.668481f}, {0.99057f, 0.920049f, 0.675675f}, - {0.990175f, 0.927196f, 0.682926f}, {0.989815f, 0.934329f, 0.690198f}, - {0.989434f, 0.94147f, 0.697519f}, {0.989077f, 0.948604f, 0.704863f}, - {0.988717f, 0.955742f, 0.712242f}, {0.988367f, 0.962878f, 0.719649f}, - {0.988033f, 0.970012f, 0.727077f}, {0.987691f, 0.977154f, 0.734536f}, - {0.987387f, 0.984288f, 0.742002f}, {0.987053f, 0.991438f, 0.749504f}}; + {0.232077, 0.059889, 0.437695}, + {0.238826, 0.059517, 0.443256}, + {0.238826, 0.059517, 0.443256}, + {0.245543, 0.059352, 0.448436}, + {0.25222, 0.059415, 0.453248}, + {0.258857, 0.059706, 0.45771}, + {0.265447, 0.060237, 0.46184}, + {0.265447, 0.060237, 0.46184}, + {0.271994, 0.060994, 0.46566}, + {0.278493, 0.061978, 0.46919}, + {0.284951, 0.063168, 0.472451}, + {0.291366, 0.064553, 0.475462}, + {0.291366, 0.064553, 0.475462}, + {0.29774, 0.066117, 0.478243}, + {0.304081, 0.067835, 0.480812}, + {0.310382, 0.069702, 0.483186}, + {0.316654, 0.07169, 0.48538}, + {0.316654, 0.07169, 0.48538}, + {0.322899, 0.073782, 0.487408}, + {0.329114, 0.075972, 0.489287}, + {0.335308, 0.078236, 0.491024}, + {0.341482, 0.080564, 0.492631}, + {0.341482, 0.080564, 0.492631}, + {0.347636, 0.082946, 0.494121}, + {0.353773, 0.085373, 0.495501}, + {0.359898, 0.087831, 0.496778}, + {0.366012, 0.090314, 0.49796}, + {0.366012, 0.090314, 0.49796}, + {0.372116, 0.092816, 0.499053}, + {0.378211, 0.095332, 0.500067}, + {0.384299, 0.097855, 0.501002}, + {0.390384, 0.100379, 0.501864}, + {0.390384, 0.100379, 0.501864}, + {0.396467, 0.102902, 0.502658}, + {0.402548, 0.10542, 0.503386}, + {0.408629, 0.10793, 0.504052}, + {0.414709, 0.110431, 0.504662}, + {0.414709, 0.110431, 0.504662}, + {0.420791, 0.11292, 0.505215}, + {0.426877, 0.115395, 0.505714}, + {0.432967, 0.117855, 0.50616}, + {0.439062, 0.120298, 0.506555}, + {0.439062, 0.120298, 0.506555}, + {0.445163, 0.122724, 0.506901}, + {0.451271, 0.125132, 0.507198}, + {0.457386, 0.127522, 0.507448}, + {0.463508, 0.129893, 0.507652}, + {0.463508, 0.129893, 0.507652}, + {0.46964, 0.132245, 0.507809}, + {0.47578, 0.134577, 0.507921}, + {0.481929, 0.136891, 0.507989}, + {0.488088, 0.139186, 0.508011}, + {0.488088, 0.139186, 0.508011}, + {0.494258, 0.141462, 0.507988}, + {0.500438, 0.143719, 0.50792}, + {0.506629, 0.145958, 0.507806}, + {0.512831, 0.148179, 0.507648}, + {0.512831, 0.148179, 0.507648}, + {0.519045, 0.150383, 0.507443}, + {0.52527, 0.152569, 0.507192}, + {0.531507, 0.154739, 0.506895}, + {0.537755, 0.156894, 0.506551}, + {0.537755, 0.156894, 0.506551}, + {0.544015, 0.159033, 0.506159}, + {0.550287, 0.161158, 0.505719}, + {0.556571, 0.163269, 0.50523}, + {0.562866, 0.165368, 0.504692}, + {0.562866, 0.165368, 0.504692}, + {0.569172, 0.167454, 0.504105}, + {0.57549, 0.16953, 0.503466}, + {0.581819, 0.171596, 0.502777}, + {0.588158, 0.173652, 0.502035}, + {0.588158, 0.173652, 0.502035}, + {0.594508, 0.175701, 0.501241}, + {0.600868, 0.177743, 0.500394}, + {0.607238, 0.179779, 0.499492}, + {0.613617, 0.181811, 0.498536}, + {0.613617, 0.181811, 0.498536}, + {0.620005, 0.18384, 0.497524}, + {0.626401, 
0.185867, 0.496456}, + {0.632805, 0.187893, 0.495332}, + {0.639216, 0.189921, 0.49415}, + {0.639216, 0.189921, 0.49415}, + {0.645633, 0.191952, 0.49291}, + {0.652056, 0.193986, 0.491611}, + {0.658483, 0.196027, 0.490253}, + {0.664915, 0.198075, 0.488836}, + {0.664915, 0.198075, 0.488836}, + {0.671349, 0.200133, 0.487358}, + {0.677786, 0.202203, 0.485819}, + {0.684224, 0.204286, 0.484219}, + {0.690661, 0.206384, 0.482558}, + {0.690661, 0.206384, 0.482558}, + {0.697098, 0.208501, 0.480835}, + {0.703532, 0.210638, 0.479049}, + {0.709962, 0.212797, 0.477201}, + {0.716387, 0.214982, 0.47529}, + {0.716387, 0.214982, 0.47529}, + {0.722805, 0.217194, 0.473316}, + {0.729216, 0.219437, 0.471279}, + {0.735616, 0.221713, 0.46918}, + {0.742004, 0.224025, 0.467018}, + {0.742004, 0.224025, 0.467018}, + {0.748378, 0.226377, 0.464794}, + {0.754737, 0.228772, 0.462509}, + {0.761077, 0.231214, 0.460162}, + {0.767398, 0.233705, 0.457755}, + {0.767398, 0.233705, 0.457755}, + {0.773695, 0.236249, 0.455289}, + {0.779968, 0.238851, 0.452765}, + {0.786212, 0.241514, 0.450184}, + {0.792427, 0.244242, 0.447543}, + {0.792427, 0.244242, 0.447543}, + {0.798608, 0.24704, 0.444848}, + {0.804752, 0.249911, 0.442102}, + {0.810855, 0.252861, 0.439305}, + {0.816914, 0.255895, 0.436461}, + {0.816914, 0.255895, 0.436461}, + {0.822926, 0.259016, 0.433573}, + {0.828886, 0.262229, 0.430644}, + {0.834791, 0.26554, 0.427671}, + {0.840636, 0.268953, 0.424666}, + {0.840636, 0.268953, 0.424666}, + {0.846416, 0.272473, 0.421631}, + {0.852126, 0.276106, 0.418573}, + {0.857763, 0.279857, 0.415496}, + {0.86332, 0.283729, 0.412403}, + {0.86332, 0.283729, 0.412403}, + {0.868793, 0.287728, 0.409303}, + {0.874176, 0.291859, 0.406205}, + {0.879464, 0.296125, 0.403118}, + {0.884651, 0.30053, 0.400047}, + {0.884651, 0.30053, 0.400047}, + {0.889731, 0.305079, 0.397002}, + {0.8947, 0.309773, 0.393995}, + {0.899552, 0.314616, 0.391037}, + {0.904281, 0.31961, 0.388137}, + {0.904281, 0.31961, 0.388137}, + {0.908884, 0.324755, 0.385308}, + {0.913354, 0.330052, 0.382563}, + {0.917689, 0.3355, 0.379915}, + {0.921884, 0.341098, 0.377376}, + {0.921884, 0.341098, 0.377376}, + {0.925937, 0.346844, 0.374959}, + {0.929845, 0.352734, 0.372677}, + {0.933606, 0.358764, 0.370541}, + {0.937221, 0.364929, 0.368567}, + {0.937221, 0.364929, 0.368567}, + {0.940687, 0.371224, 0.366762}, + {0.944006, 0.377643, 0.365136}, + {0.94718, 0.384178, 0.363701}, + {0.95021, 0.39082, 0.362468}, + {0.95021, 0.39082, 0.362468}, + {0.953099, 0.397563, 0.361438}, + {0.955849, 0.4044, 0.360619}, + {0.958464, 0.411324, 0.360014}, + {0.960949, 0.418323, 0.35963}, + {0.960949, 0.418323, 0.35963}, + {0.96331, 0.42539, 0.359469}, + {0.965549, 0.432519, 0.359529}, + {0.967671, 0.439703, 0.35981}, + {0.96968, 0.446936, 0.360311}, + {0.96968, 0.446936, 0.360311}, + {0.971582, 0.45421, 0.36103}, + {0.973381, 0.46152, 0.361965}, + {0.975082, 0.468861, 0.363111}, + {0.97669, 0.476226, 0.364466}, + {0.97669, 0.476226, 0.364466}, + {0.97821, 0.483612, 0.366025}, + {0.979645, 0.491014, 0.367783}, + {0.981, 0.498428, 0.369734}, + {0.982279, 0.505851, 0.371874}, + {0.982279, 0.505851, 0.371874}, + {0.983485, 0.51328, 0.374198}, + {0.984622, 0.520713, 0.376698}, + {0.985693, 0.528148, 0.379371}, + {0.9867, 0.535582, 0.38221}, + {0.9867, 0.535582, 0.38221}, + {0.987646, 0.543015, 0.38521}, + {0.988533, 0.550446, 0.388365}, + {0.989363, 0.557873, 0.391671}, + {0.990138, 0.565296, 0.395122}, + {0.990138, 0.565296, 0.395122}, + {0.990871, 0.572706, 0.398714}, + {0.991558, 0.580107, 0.402441}, + {0.992196, 
0.587502, 0.406299}, + {0.992785, 0.594891, 0.410283}, + {0.992785, 0.594891, 0.410283}, + {0.993326, 0.602275, 0.41439}, + {0.993834, 0.609644, 0.418613}, + {0.994309, 0.616999, 0.42295}, + {0.994738, 0.62435, 0.427397}, + {0.994738, 0.62435, 0.427397}, + {0.995122, 0.631696, 0.431951}, + {0.99548, 0.639027, 0.436607}, + {0.99581, 0.646344, 0.441361}, + {0.996096, 0.653659, 0.446213}, + {0.996096, 0.653659, 0.446213}, + {0.996341, 0.660969, 0.45116}, + {0.99658, 0.668256, 0.456192}, + {0.996775, 0.675541, 0.461314}, + {0.996925, 0.682828, 0.466526}, + {0.996925, 0.682828, 0.466526}, + {0.997077, 0.690088, 0.471811}, + {0.997186, 0.697349, 0.477182}, + {0.997254, 0.704611, 0.482635}, + {0.997325, 0.711848, 0.488154}, + {0.997325, 0.711848, 0.488154}, + {0.997351, 0.719089, 0.493755}, + {0.997351, 0.726324, 0.499428}, + {0.997341, 0.733545, 0.505167}, + {0.997285, 0.740772, 0.510983}, + {0.997285, 0.740772, 0.510983}, + {0.997228, 0.747981, 0.516859}, + {0.997138, 0.75519, 0.522806}, + {0.997019, 0.762398, 0.528821}, + {0.996898, 0.769591, 0.534892}, + {0.996898, 0.769591, 0.534892}, + {0.996727, 0.776795, 0.541039}, + {0.996571, 0.783977, 0.547233}, + {0.996369, 0.791167, 0.553499}, + {0.996162, 0.798348, 0.55982}, + {0.996162, 0.798348, 0.55982}, + {0.995932, 0.805527, 0.566202}, + {0.99568, 0.812706, 0.572645}, + {0.995424, 0.819875, 0.57914}, + {0.995131, 0.827052, 0.585701}, + {0.995131, 0.827052, 0.585701}, + {0.994851, 0.834213, 0.592307}, + {0.994524, 0.841387, 0.598983}, + {0.994222, 0.84854, 0.605696}, + {0.993866, 0.855711, 0.612482}, + {0.993866, 0.855711, 0.612482}, + {0.993545, 0.862859, 0.619299}, + {0.99317, 0.870024, 0.626189}, + {0.992831, 0.877168, 0.633109}, + {0.99244, 0.88433, 0.640099}, + {0.99244, 0.88433, 0.640099}, + {0.992089, 0.89147, 0.647116}, + {0.991688, 0.898627, 0.654202}, + {0.991332, 0.905763, 0.661309}, + {0.99093, 0.912915, 0.668481}, + {0.99093, 0.912915, 0.668481}, + {0.99057, 0.920049, 0.675675}, + {0.990175, 0.927196, 0.682926}, + {0.989815, 0.934329, 0.690198}, + {0.989434, 0.94147, 0.697519}, + {0.989434, 0.94147, 0.697519}, + {0.989077, 0.948604, 0.704863}, + {0.988717, 0.955742, 0.712242}, + {0.988367, 0.962878, 0.719649}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.987691, 0.977154, 0.734536}, + {0.987387, 0.984288, 0.742002}, + {0.987053, 0.991438, 0.749504}, +}; + +const size_t magma_cal_ref_n = 256; +const float magma_cal_ref_palette[magma_cal_ref_n][3] = { + {0.232077, 0.059889, 0.437695}, + {0.25222, 0.059415, 0.453248}, + {0.278493, 0.061978, 0.46919}, + {0.304081, 0.067835, 0.480812}, + {0.329114, 0.075972, 0.489287}, + {0.353773, 0.085373, 0.495501}, + {0.372116, 0.092816, 0.499053}, + {0.396467, 0.102902, 0.502658}, + {0.414709, 0.110431, 0.504662}, + {0.439062, 0.120298, 0.506555}, + {0.463508, 0.129893, 0.507652}, + {0.481929, 0.136891, 0.507989}, + {0.500438, 0.143719, 0.50792}, + {0.519045, 0.150383, 0.507443}, + {0.537755, 0.156894, 0.506551}, + {0.562866, 0.165368, 0.504692}, + {0.581819, 0.171596, 0.502777}, + {0.600868, 0.177743, 0.500394}, + {0.620005, 0.18384, 0.497524}, + {0.639216, 0.189921, 0.49415}, + {0.658483, 0.196027, 0.490253}, + {0.677786, 0.202203, 0.485819}, + {0.690661, 0.206384, 0.482558}, + {0.716387, 0.214982, 0.47529}, + {0.729216, 0.219437, 0.471279}, + {0.748378, 0.226377, 0.464794}, + {0.767398, 0.233705, 0.457755}, + {0.779968, 0.238851, 0.452765}, + {0.792427, 0.244242, 0.447543}, + {0.816914, 0.255895, 0.436461}, + {0.828886, 0.262229, 0.430644}, + {0.840636, 0.268953, 
0.424666}, + {0.857763, 0.279857, 0.415496}, + {0.86332, 0.283729, 0.412403}, + {0.879464, 0.296125, 0.403118}, + {0.889731, 0.305079, 0.397002}, + {0.904281, 0.31961, 0.388137}, + {0.913354, 0.330052, 0.382563}, + {0.921884, 0.341098, 0.377376}, + {0.929845, 0.352734, 0.372677}, + {0.937221, 0.364929, 0.368567}, + {0.944006, 0.377643, 0.365136}, + {0.95021, 0.39082, 0.362468}, + {0.955849, 0.4044, 0.360619}, + {0.960949, 0.418323, 0.35963}, + {0.965549, 0.432519, 0.359529}, + {0.96968, 0.446936, 0.360311}, + {0.971582, 0.45421, 0.36103}, + {0.975082, 0.468861, 0.363111}, + {0.97669, 0.476226, 0.364466}, + {0.981, 0.498428, 0.369734}, + {0.982279, 0.505851, 0.371874}, + {0.984622, 0.520713, 0.376698}, + {0.9867, 0.535582, 0.38221}, + {0.987646, 0.543015, 0.38521}, + {0.989363, 0.557873, 0.391671}, + {0.990138, 0.565296, 0.395122}, + {0.990871, 0.572706, 0.398714}, + {0.992196, 0.587502, 0.406299}, + {0.992785, 0.594891, 0.410283}, + {0.993834, 0.609644, 0.418613}, + {0.994309, 0.616999, 0.42295}, + {0.994738, 0.62435, 0.427397}, + {0.99548, 0.639027, 0.436607}, + {0.99581, 0.646344, 0.441361}, + {0.996096, 0.653659, 0.446213}, + {0.996341, 0.660969, 0.45116}, + {0.996775, 0.675541, 0.461314}, + {0.996925, 0.682828, 0.466526}, + {0.997077, 0.690088, 0.471811}, + {0.997186, 0.697349, 0.477182}, + {0.997254, 0.704611, 0.482635}, + {0.997325, 0.711848, 0.488154}, + {0.997351, 0.719089, 0.493755}, + {0.997351, 0.726324, 0.499428}, + {0.997285, 0.740772, 0.510983}, + {0.997285, 0.740772, 0.510983}, + {0.997228, 0.747981, 0.516859}, + {0.997138, 0.75519, 0.522806}, + {0.997019, 0.762398, 0.528821}, + {0.996898, 0.769591, 0.534892}, + {0.996727, 0.776795, 0.541039}, + {0.996571, 0.783977, 0.547233}, + {0.996369, 0.791167, 0.553499}, + {0.996162, 0.798348, 0.55982}, + {0.996162, 0.798348, 0.55982}, + {0.995932, 0.805527, 0.566202}, + {0.99568, 0.812706, 0.572645}, + {0.995424, 0.819875, 0.57914}, + {0.995131, 0.827052, 0.585701}, + {0.995131, 0.827052, 0.585701}, + {0.995131, 0.827052, 0.585701}, + {0.994851, 0.834213, 0.592307}, + {0.994524, 0.841387, 0.598983}, + {0.994222, 0.84854, 0.605696}, + {0.993866, 0.855711, 0.612482}, + {0.993866, 0.855711, 0.612482}, + {0.993866, 0.855711, 0.612482}, + {0.993545, 0.862859, 0.619299}, + {0.99317, 0.870024, 0.626189}, + {0.99317, 0.870024, 0.626189}, + {0.992831, 0.877168, 0.633109}, + {0.99244, 0.88433, 0.640099}, + {0.99244, 0.88433, 0.640099}, + {0.99244, 0.88433, 0.640099}, + {0.99244, 0.88433, 0.640099}, + {0.992089, 0.89147, 0.647116}, + {0.991688, 0.898627, 0.654202}, + {0.991688, 0.898627, 0.654202}, + {0.991332, 0.905763, 0.661309}, + {0.991332, 0.905763, 0.661309}, + {0.99093, 0.912915, 0.668481}, + {0.99093, 0.912915, 0.668481}, + {0.99093, 0.912915, 0.668481}, + {0.99093, 0.912915, 0.668481}, + {0.99057, 0.920049, 0.675675}, + {0.99057, 0.920049, 0.675675}, + {0.990175, 0.927196, 0.682926}, + {0.990175, 0.927196, 0.682926}, + {0.990175, 0.927196, 0.682926}, + {0.989815, 0.934329, 0.690198}, + {0.989815, 0.934329, 0.690198}, + {0.989434, 0.94147, 0.697519}, + {0.989434, 0.94147, 0.697519}, + {0.989434, 0.94147, 0.697519}, + {0.989434, 0.94147, 0.697519}, + {0.989434, 0.94147, 0.697519}, + {0.989434, 0.94147, 0.697519}, + {0.989077, 0.948604, 0.704863}, + {0.989077, 0.948604, 0.704863}, + {0.989077, 0.948604, 0.704863}, + {0.988717, 0.955742, 0.712242}, + {0.988717, 0.955742, 0.712242}, + {0.988717, 0.955742, 0.712242}, + {0.988717, 0.955742, 0.712242}, + {0.988367, 0.962878, 0.719649}, + {0.988367, 0.962878, 0.719649}, + {0.988367, 0.962878, 
0.719649}, + {0.988367, 0.962878, 0.719649}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.988033, 0.970012, 0.727077}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987691, 0.977154, 0.734536}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987387, 0.984288, 0.742002}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + 
{0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, + {0.987053, 0.991438, 0.749504}, +}; // clang-format on } // namespace viz diff --git a/ouster_viz/src/common.h b/ouster_viz/src/common.h index 6bf0d7e3..093e9da7 100644 --- a/ouster_viz/src/common.h +++ b/ouster_viz/src/common.h @@ -20,8 +20,8 @@ namespace impl { /** * load and compile GLSL shaders * - * @param vertex_shader_code code of vertex shader - * @param fragment_shader_code code of fragment shader + * @param[in] vertex_shader_code code of vertex shader + * @param[in] fragment_shader_code code of fragment shader * @return handle to program_id */ inline GLuint load_shaders(const std::string& vertex_shader_code, @@ -95,15 +95,18 @@ inline GLuint load_shaders(const std::string& vertex_shader_code, * load a texture from an array of GLfloat or equivalent * such as float[n][3] * - * @param texture array of at least size width * height * elements_per_texel - * where elements per texel is 3 for GL_RGB format and 1 for + * @tparam F The type for the texture object. + * + * @param[in] texture array of at least size + * width * height * elements_per_texel where elements + * per texel is 3 for GL_RGB format and 1 for * GL_RED format - * @param width width of texture in texels - * @param height height of texture in texels - * @param texture_id handle generated by glGenTextures - * @param internal_format internal format, e.g. GL_RGB or GL_RGB32F - * @param format format, e.g. GL_RGB or GL_RED - * @param type texture element type + * @param[in] width width of texture in texels + * @param[in] height height of texture in texels + * @param[in] texture_id handle generated by glGenTextures + * @param[in] internal_format internal format, e.g. GL_RGB or GL_RGB32F + * @param[in] format format, e.g. GL_RGB or GL_RED + * @param[in] type texture element type */ template void load_texture(const F& texture, size_t width, size_t height, @@ -130,26 +133,24 @@ void load_texture(const F& texture, size_t width, size_t height, * The point vertex shader supports transforming the point cloud by an array of * transformations. * - * @param xyz XYZ point before it was multiplied by range. - * Corresponds to the "xyzlut" used by LidarScan. - * - * @param range Range of each point. + * @param[in] xyz XYZ point before it was multiplied by range. + * Corresponds to the "xyzlut" used by LidarScan. * - * @param key Key for colouring each point for aesthetic reasons. + * @param[in] range Range of each point. * - * @param trans_index Index of which of the transformations to use for this - * point. Normalized between 0 and 1. (0 being the first - * 1 being the last). + * @param[in] key Key for colouring each point for aesthetic reasons. * - * @param model Extrinsic calibration of the lidar. + * @param[in] trans_index Index of which of the transformations to use for + * this point. Normalized between 0 and 1. (0 being the first 1 being the last). * - * @param transformation The w transformations are stored as a w x 4 texture. 
- * Each column of the texture corresponds one 4 x 4 - * transformation matrix, where the four pixels' rgb - * values correspond to four columns (3 rotation 1 - * translation) + * @param[in] model Extrinsic calibration of the lidar. * - * @param proj_view Camera view matrix controlled by the visualizer. + * @param[in] transformation The w transformations are stored as a w x 4 + * texture. Each column of the texture corresponds to + * one 4 x 4 transformation matrix, where the + * four pixels' rgb values correspond to + * four columns (3 rotation, 1 translation) + * @param[in] proj_view Camera view matrix controlled by the visualizer. */ static const std::string point_vertex_shader_code = R"SHADER( @@ -216,18 +217,42 @@ static const std::string ring_vertex_shader_code = R"SHADER( #version 330 core in vec3 ring_xyz; - uniform float ring_range; uniform mat4 proj_view; + out vec2 ring_xy; void main(){ - gl_Position = proj_view * vec4(ring_xyz * ring_range, 1.0); + gl_Position = proj_view * vec4(ring_xyz, 1.0); gl_Position.z = gl_Position.w; + ring_xy = ring_xyz.xy; })SHADER"; static const std::string ring_fragment_shader_code = R"SHADER( #version 330 core out vec4 color; + in vec2 ring_xy; + uniform float ring_range; + uniform float ring_thickness; void main() { - color = vec4(0.15, 0.15, 0.15, 1); + // Compute this fragment's distance from the center of the rings + float radius = length(ring_xy); + + // Convert to a signed distance from the nearest ring + float signedDistance = radius - round(radius/ring_range)*ring_range; + + // Compute how quickly distance changes per pixel at our location + // Make sure to do this using radius since it is mostly continuous + vec2 gradient = vec2(dFdx(radius), dFdy(radius)); + float len = length(gradient);// meters/pixel + + // Get how far we are from the line in pixel coordinates + // meters/(meters/pixels) = pixels + float rangeFromLine = abs(signedDistance/len); + + // Draw a line within the thickness + float lineWeight = clamp(ring_thickness - rangeFromLine, 0.0f, 1.0f); + + // Don't draw anything outside our max radius or at the center + if (radius > 1000.0 || radius < ring_range*0.1) { lineWeight = 0; } + color = vec4(vec3(0.15)*lineWeight, 1.0); })SHADER"; static const std::string cuboid_vertex_shader_code = R"SHADER( diff --git a/ouster_viz/src/misc.cpp b/ouster_viz/src/misc.cpp index 9641b77d..46b030db 100644 --- a/ouster_viz/src/misc.cpp +++ b/ouster_viz/src/misc.cpp @@ -30,29 +30,51 @@ GLuint GLRings::ring_program_id; GLuint GLRings::ring_xyz_id; GLuint GLRings::ring_proj_view_id; GLuint GLRings::ring_range_id; - -GLRings::GLRings(const size_t points_per_ring_) - : points_per_ring(points_per_ring_), - ring_size_(1), - ring_line_width_(1), - rings_enabled(true) { - std::vector xyz(points_per_ring_ * 3, 0); - for (size_t i = 0; i < points_per_ring; i++) { - const GLfloat theta = i * 2.0 * M_PI / points_per_ring; - xyz[3 * i] = std::sin(theta); - xyz[3 * i + 1] = std::cos(theta); - xyz[3 * i + 2] = 0.0; +GLuint GLRings::ring_thickness_id; + +GLRings::GLRings() : ring_size(1), ring_line_width(1), rings_enabled(true) { + // Make a quad that's a bit larger than our maximum range + std::vector xyz(3 * 6, 0); + const float max_range = 1000; + // Point 0 + xyz[0] = -1.1; + xyz[1] = -1.1; + xyz[2] = 0.0; + // Point 1 + xyz[3] = 1.1; + xyz[4] = -1.1; + xyz[5] = 0.0; + // Point 2 + xyz[6] = 1.1; + xyz[7] = 1.1; + xyz[8] = 0.0; + // Point 3 + xyz[9] = -1.1; + xyz[10] = -1.1; + xyz[11] = 0.0; + // Point 4 + xyz[12] = -1.1; + xyz[13] = 1.1; + xyz[14] = 0.0; + // Point 5
+ xyz[15] = 1.1; + xyz[16] = 1.1; + xyz[17] = 0.0; + + // scale to expected size + for (size_t i = 0; i < xyz.size(); i++) { + xyz[i] = xyz[i] * max_range; } glGenBuffers(1, &xyz_buffer); glBindBuffer(GL_ARRAY_BUFFER, xyz_buffer); - glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * points_per_ring * 3, - xyz.data(), GL_STATIC_DRAW); + glBufferData(GL_ARRAY_BUFFER, sizeof(GLfloat) * xyz.size(), xyz.data(), + GL_STATIC_DRAW); } void GLRings::update(const TargetDisplay& target) { rings_enabled = target.rings_enabled_; - ring_size_ = target.ring_size_; - ring_line_width_ = target.ring_line_width_; + ring_size = target.ring_size_; + ring_line_width = target.ring_line_width_; } void GLRings::draw(const WindowCtx&, const CameraData& camera) { @@ -62,8 +84,7 @@ void GLRings::draw(const WindowCtx&, const CameraData& camera) { if (!rings_enabled) return; glUseProgram(GLRings::ring_program_id); - glLineWidth(1); - const float radius = std::pow(10.0f, ring_size_); + const float radius = std::pow(10.0f, ring_size); // rings are displayed at the camera target, so model is inverse of target Eigen::Matrix4f mvp = (camera.proj * camera.view).cast(); glUniformMatrix4fv(GLRings::ring_proj_view_id, 1, GL_FALSE, mvp.data()); @@ -76,22 +97,10 @@ void GLRings::draw(const WindowCtx&, const CameraData& camera) { 0, // stride (void*)0 // array buffer offset ); - const GLfloat max_radius = 1000; - const GLfloat max_rings = 2000; // for performance - for (GLfloat r = radius, rr = 0; r < max_radius && rr < max_rings; - r += radius, rr += 1) { - glUniform1f(GLRings::ring_range_id, r); - glDrawArrays(GL_LINE_LOOP, 0, points_per_ring); - // Making more paths to thicken the line - // TODO[pb]: Need to find a better way to draw a thick lines, this - // method is too gross (slow and rugged) :( - for (int lw = 1; lw < ring_line_width_; ++lw) { - glUniform1f(GLRings::ring_range_id, r + lw * 0.02); - glDrawArrays(GL_LINE_LOOP, 0, points_per_ring); - glUniform1f(GLRings::ring_range_id, r - lw * 0.02); - glDrawArrays(GL_LINE_LOOP, 0, points_per_ring); - } - } + + glUniform1f(GLRings::ring_range_id, radius); + glUniform1f(GLRings::ring_thickness_id, ring_line_width); + glDrawArrays(GL_TRIANGLES, 0, 6); glDisableVertexAttribArray(GLRings::ring_xyz_id); } @@ -103,6 +112,8 @@ void GLRings::initialize() { glGetUniformLocation(ring_program_id, "proj_view"); GLRings::ring_range_id = glGetUniformLocation(ring_program_id, "ring_range"); + GLRings::ring_thickness_id = + glGetUniformLocation(ring_program_id, "ring_thickness"); GLRings::initialized = true; } diff --git a/ouster_viz/src/misc.h b/ouster_viz/src/misc.h index 62f8d7d3..9c755186 100644 --- a/ouster_viz/src/misc.h +++ b/ouster_viz/src/misc.h @@ -27,19 +27,18 @@ class GLRings { static GLuint ring_xyz_id; static GLuint ring_proj_view_id; static GLuint ring_range_id; + static GLuint ring_thickness_id; - const size_t points_per_ring; GLuint xyz_buffer; - int ring_size_; - int ring_line_width_; + int ring_size; + int ring_line_width; bool rings_enabled; public: /* - * Parameter etermines number of points used to draw ring, the more the - * rounder + * Instantiate the rings */ - GLRings(const size_t points_per_ring_ = 512); + GLRings(); void update(const TargetDisplay& target); diff --git a/ouster_viz/src/point_viz.cpp b/ouster_viz/src/point_viz.cpp index 468c32de..03580398 100644 --- a/ouster_viz/src/point_viz.cpp +++ b/ouster_viz/src/point_viz.cpp @@ -141,6 +141,7 @@ struct PointViz::Impl { double fps_last_time_{0}; uint64_t fps_frame_counter_{0}; double fps_{0}; + bool 
update_on_input_{true}; Impl(std::unique_ptr&& glfw) : glfw{std::move(glfw)} {} }; @@ -229,6 +230,10 @@ void PointViz::running(bool state) { pimpl->glfw->running(state); } void PointViz::visible(bool state) { pimpl->glfw->visible(state); } +bool PointViz::update_on_input() { return pimpl->update_on_input_; } + +void PointViz::update_on_input(bool state) { pimpl->update_on_input_ = state; } + bool PointViz::update() { std::lock_guard guard{pimpl->update_mx}; @@ -816,32 +821,32 @@ void add_default_controls(viz::PointViz& viz, std::mutex* mx) { switch (key) { case GLFW_KEY_W: viz.camera().pitch(5); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_S: viz.camera().pitch(-5); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_A: viz.camera().yaw(5); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_D: viz.camera().yaw(-5); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_EQUAL: viz.camera().dolly(5); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_MINUS: viz.camera().dolly(-5); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_0: orthographic = !orthographic; viz.camera().set_orthographic(orthographic); - viz.update(); + if (viz.update_on_input()) viz.update(); break; case GLFW_KEY_ESCAPE: viz.running(false); @@ -853,7 +858,7 @@ void add_default_controls(viz::PointViz& viz, std::mutex* mx) { switch (key) { case GLFW_KEY_R: viz.camera().reset(); - viz.update(); + if (viz.update_on_input()) viz.update(); break; default: break; @@ -862,7 +867,7 @@ void add_default_controls(viz::PointViz& viz, std::mutex* mx) { switch (key) { case GLFW_KEY_R: viz.camera().birds_eye_view(); - viz.update(); + if (viz.update_on_input()) viz.update(); break; default: break; @@ -875,7 +880,7 @@ void add_default_controls(viz::PointViz& viz, std::mutex* mx) { auto lock = mx ? std::unique_lock{*mx} : std::unique_lock{}; viz.camera().dolly(static_cast(yoff * 5)); - viz.update(); + if (viz.update_on_input()) viz.update(); return true; }); @@ -900,7 +905,7 @@ void add_default_controls(viz::PointViz& viz, std::mutex* mx) { dy *= 2.0 / window_diagonal; viz.camera().dolly_xy(dx, dy); } - viz.update(); + if (viz.update_on_input()) viz.update(); return true; }); } diff --git a/python/CMakeLists.txt b/python/CMakeLists.txt index a7e8b3a8..5d2d8566 100644 --- a/python/CMakeLists.txt +++ b/python/CMakeLists.txt @@ -7,6 +7,7 @@ list(APPEND CMAKE_MODULE_PATH ${OUSTER_SDK_PATH}/cmake) # configure vcpkg from environment variables, if present include(VcpkgEnv) +include(Coverage) project(python-ouster-sdk) @@ -41,7 +42,7 @@ endif() # CMAKE_LIBRARY_OUTPUT_DIRECTORY is set in setup.py to the root of the `ouster` # namespace, but we have to provide per-target packages directories for each # extension module here. 
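The guarded calls above mean the built-in key and mouse handlers only request a redraw when ``update_on_input`` is enabled, so an application that owns the render loop can batch camera changes and draw once. A minimal sketch of how this might look from Python, assuming the ``ouster.sdk.viz`` import path introduced by this changeset and the ``update_on_input`` binding added to ``_viz.cpp`` further below::

    from ouster.sdk import viz

    point_viz = viz.PointViz("update_on_input example")
    viz.add_default_controls(point_viz)

    # Key/mouse handlers still move the camera, but no longer call update();
    # the application decides when a new frame is drawn.
    point_viz.update_on_input(False)
    assert point_viz.update_on_input() is False

    point_viz.update()  # push one frame explicitly
    point_viz.run()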
-set(EXT_DIR ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}) +set(EXT_DIR ${CMAKE_LIBRARY_OUTPUT_DIRECTORY}/sdk) # Note: With multi-configuration generators (like for VS), CMake automatically # appends build-configuration suffix to *_OUTPUT_DIRECTORY properties *unless* @@ -53,7 +54,8 @@ target_link_libraries(_client ouster_client ouster_build spdlog::spdlog) - +CodeCoverageFunctionality(_client) + target_include_directories(_client SYSTEM PRIVATE ${EIGEN3_INCLUDE_DIR}) set_target_properties(_client PROPERTIES POSITION_INDEPENDENT_CODE TRUE @@ -64,6 +66,7 @@ target_link_libraries(_pcap PRIVATE ouster_pcap ouster_build) set_target_properties(_pcap PROPERTIES POSITION_INDEPENDENT_CODE TRUE LIBRARY_OUTPUT_DIRECTORY ${EXT_DIR}/pcap/$<0:>) +CodeCoverageFunctionality(_pcap) pybind11_add_module(_viz src/cpp/_viz.cpp) target_link_libraries(_viz PRIVATE ouster_client ouster_viz ouster_build) @@ -71,9 +74,11 @@ target_include_directories(_viz SYSTEM PRIVATE ${EIGEN3_INCLUDE_DIR}) set_target_properties(_viz PROPERTIES POSITION_INDEPENDENT_CODE TRUE LIBRARY_OUTPUT_DIRECTORY ${EXT_DIR}/viz/$<0:>) +CodeCoverageFunctionality(_viz) pybind11_add_module(_osf src/cpp/_osf.cpp) target_link_libraries(_osf PRIVATE ouster_osf ouster_build) set_target_properties(_osf PROPERTIES POSITION_INDEPENDENT_CODE TRUE LIBRARY_OUTPUT_DIRECTORY ${EXT_DIR}/osf/$<0:>) +CodeCoverageFunctionality(_osf) diff --git a/python/Dockerfile b/python/Dockerfile index 03b5f9a5..42db8209 100644 --- a/python/Dockerfile +++ b/python/Dockerfile @@ -51,7 +51,7 @@ WORKDIR ${BUILD_HOME} RUN set -xe \ # use oldest available, supported python as tox default -&& PYTHON=$(which python3.7 python3.8 python3.9 python3.10 | head -1) \ +&& PYTHON=$(which python3.8 python3.9 python3.10 | head -1) \ && $PYTHON -m pip install --no-cache-dir --user -U pip tox pybind11 # Populate source dir diff --git a/python/README.rst b/python/README.rst index 8eca3055..73b58c2d 100644 --- a/python/README.rst +++ b/python/README.rst @@ -12,6 +12,7 @@ sensor data in both languages. The SDK includes APIs for: * Querying and setting sensor configuration * Recording and reading data in pcap format +* Recording and reading data in :ref:`Open Sensor Format (OSF)` * Reading and buffering sensor UDP data streams reliably * Conversion of raw data to range/signal/near_ir/reflectivity images (destaggering) * Efficient projection of range measurements to Cartesian (x, y, z) coordinates @@ -42,7 +43,7 @@ Pre-built binaries are provided on `PyPI`_ for the following platforms: Building from source is supported on: - Ubuntu 20.04, 22.04, and Debian 11 (x86-64, aarch64) -- macOS >= 10.15 (x86-64), >= 11.0 (arm64) +- macOS >= 11.0 (arm64, x86-64) - Windows 10 (x86-64) The Ouster SDK drops languages and platforms as they exit their standard support cycle. 
Please diff --git a/python/mypy.ini b/python/mypy.ini index 3909a323..adb17ede 100644 --- a/python/mypy.ini +++ b/python/mypy.ini @@ -19,4 +19,4 @@ ignore_missing_imports = True ignore_missing_imports = True [mypy-requests.*] -ignore_missing_imports = True +ignore_missing_imports = True \ No newline at end of file diff --git a/python/pyproject.toml b/python/pyproject.toml index 565ed28d..d53e33fb 100644 --- a/python/pyproject.toml +++ b/python/pyproject.toml @@ -1,7 +1,7 @@ [build-system] build-backend = "setuptools.build_meta" -# use same cmake version as vcpkg (2023.02.24) for pep-517 builds -requires = ["setuptools>=40.1.0", "wheel", "cmake==3.21.1", "pybind11==2.10.4"] +# use same cmake version as vcpkg (2023.10.19) for pep-517 builds +requires = ["setuptools==69.2.0", "wheel", "cmake==3.24.2", "pybind11==2.10.4"] [tool.pytest.ini_options] testpaths = ["tests"] diff --git a/python/setup.cfg b/python/setup.cfg index aa2fb2ac..fcca5410 100644 --- a/python/setup.cfg +++ b/python/setup.cfg @@ -17,11 +17,11 @@ classifiers = Programming Language :: Python Programming Language :: Python :: 3 Programming Language :: Python :: 3 :: Only - Programming Language :: Python :: 3.7 Programming Language :: Python :: 3.8 Programming Language :: Python :: 3.9 Programming Language :: Python :: 3.10 Programming Language :: Python :: 3.11 + Programming Language :: Python :: 3.12 Topic :: Software Development :: Libraries Topic :: System :: Hardware :: Hardware Drivers Topic :: Scientific/Engineering diff --git a/python/setup.py b/python/setup.py index bdf5ec0c..4642780f 100644 --- a/python/setup.py +++ b/python/setup.py @@ -66,7 +66,7 @@ def build_extension(self, ext): build_args = ['--config', cfg] env = os.environ.copy() - jobs = os.getenv('OUSTER_SDK_BUILD_JOBS', 2) + jobs = os.getenv('OUSTER_SDK_BUILD_JOBS', os.cpu_count()) build_args += ['--', f'-j{jobs}'] if platform.system() == "Windows": @@ -82,16 +82,25 @@ def build_extension(self, ext): if not os.path.exists(self.build_temp): os.makedirs(self.build_temp) - print("Running: ") + def cmake_log(message): + env = os.environ.copy() + log_file = env.get('OUSTER_SDK_CMAKE_LOG_FILE') + if log_file: + if os.path.exists(os.path.dirname(log_file)): + with open(log_file, 'a') as f: + f.write(str(message)) + print(message) + + cmake_log("Running: ") run = ['cmake', ext.sourcedir] + cmake_args - print(run) + cmake_log(run) output1 = subprocess.run(run, cwd=self.build_temp, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, text=True) - print("CMAKE CONFIG OUTPUT") - print(output1.stdout) + cmake_log("CMAKE CONFIG OUTPUT") + cmake_log(output1.stdout) if output1.returncode != 0: raise "Error running cmake" @@ -100,8 +109,8 @@ def build_extension(self, ext): stdout=subprocess.PIPE, stderr=subprocess.STDOUT, env=env, text=True) - print("CMAKE BUILD OUTPUT") - print(output2.stdout) + cmake_log("CMAKE BUILD OUTPUT") + cmake_log(output2.stdout) if output2.returncode != 0: raise "Error running cmake --build" @@ -136,6 +145,31 @@ def run(self): os.remove(file) +def install_requires(): + install_requires = [ + 'psutil >=5.9.5, <6', + 'zeroconf >=0.131.0', + 'click >=8.1.3, <9', + 'importlib_metadata ==6.6.0', + 'prettytable >= 2.1.0', + 'requests >=2.0, <3', + 'more-itertools >=8.6', + 'numpy >=1.19, <2, !=1.19.4', + # scipy is not supported on Mac M1 with Mac OS < 12.0 + 'scipy >=1.7, <2;platform_system != "Darwin" or platform_machine != "arm64" or platform_version >= "21.0.0"', + 'typing-extensions >=3.7.4.3', + 'Pillow >=9.2', + 'packaging', + 'setuptools; 
python_version >= "3.12"', + ] + env = os.environ.copy() + skip_mapping = env.get('OUSTER_SDK_SKIP_MAPPING') + if not skip_mapping: + install_requires.append('ouster-mapping==0.2.0rc6; python_version >= "3.8" and python_version <= "3.12"') + + return install_requires + + setup( name='ouster-sdk', url='https://github.com/ouster-lidar/ouster_example', @@ -144,11 +178,11 @@ def run(self): package_dir={'': 'src'}, packages=find_namespace_packages(where='src', include='ouster.*'), package_data={ - 'ouster.client': ['py.typed', '_client.pyi'], - 'ouster.pcap': ['py.typed', '_pcap.pyi'], - 'ouster.osf': ['py.typed', '_osf.pyi'], - 'ouster.viz': ['py.typed', '_viz.pyi'], - 'ouster.sdkx': ['py.typed'], + 'ouster.sdk.client': ['py.typed', '_client.pyi'], + 'ouster.sdk.pcap': ['py.typed', '_pcap.pyi'], + 'ouster.sdk.osf': ['py.typed', '_osf.pyi'], + 'ouster.sdk.viz': ['py.typed', '_viz.pyi'], + 'ouster.sdk.bag': ['py.typed'] }, author='Ouster Sensor SDK Developers', author_email='oss@ouster.io', @@ -163,27 +197,12 @@ def run(self): 'bdist_wheel': sdk_bdist_wheel, }, zip_safe=False, - python_requires='>=3.7, <4', - install_requires=[ - 'psutil >=5.9.5, <6', - 'zeroconf ==0.58.2', - 'click >=8.1.3, <9', - 'python-magic ==0.4.27', - 'importlib_metadata ==6.6.0', - 'prettytable >= 2.1.0', - 'requests >=2.0, <3', - 'more-itertools >=8.6', - 'numpy >=1.19, <2, !=1.19.4', - # scipy is not supported on Mac M1 with Mac OS < 12.0 - 'scipy >=1.7, <2;platform_system != "Darwin" or platform_machine != "arm64" or platform_version >= "21.0.0"', - 'typing-extensions >=3.7.4.3', - 'Pillow >=9.2', - 'packaging', - 'ouster-mapping>=0.1.0.dev3; python_version >= "3.8"', - ], + python_requires='>=3.8, <4', + install_requires=install_requires(), extras_require={ 'test': [ 'pytest >=7.0, <8', + 'pytest-asyncio', 'flask==2.2.5' ], 'dev': ['flake8', 'mypy', 'pylsp-mypy', 'python-lsp-server', 'yapf'], @@ -206,8 +225,8 @@ def run(self): }, entry_points={'console_scripts': [ - 'simple-viz=ouster.sdk.simple_viz:main', - 'convert-meta-to-legacy=ouster.sdk.convert_to_legacy:main', + 'simple-viz=ouster.sdk.simple_viz:main', # TODO[UN]: do we need to keep? + 'convert-meta-to-legacy=ouster.sdk.convert_to_legacy:main', # TODO[UN]: do we need to keep? 
'ouster-cli=ouster.cli.core:run' ] } diff --git a/python/src/cpp/_client.cpp b/python/src/cpp/_client.cpp index 51f74879..e04edf01 100644 --- a/python/src/cpp/_client.cpp +++ b/python/src/cpp/_client.cpp @@ -40,7 +40,6 @@ #include #include -#include "ouster/buffered_udp_source.h" #include "ouster/client.h" #include "ouster/image_processing.h" #include "ouster/impl/build.h" @@ -48,6 +47,7 @@ #include "ouster/impl/profile_extension.h" #include "ouster/lidar_scan.h" #include "ouster/types.h" +#include "ouster/udp_packet_source.h" namespace py = pybind11; namespace chrono = std::chrono; @@ -58,13 +58,20 @@ using ouster::sensor::ChanField; using ouster::sensor::data_format; using ouster::sensor::ImuPacket; using ouster::sensor::LidarPacket; +using ouster::sensor::Packet; using ouster::sensor::packet_format; using ouster::sensor::sensor_config; using ouster::sensor::sensor_info; using ouster::sensor::impl::BufferedUDPSource; +using ouster::sensor::impl::Event; using ouster::sensor::impl::packet_writer; +using ouster::sensor::impl::Producer; +using ouster::sensor::impl::UDPPacketSource; using namespace ouster; +using client_shared_ptr = std::shared_ptr; +PYBIND11_MAKE_OPAQUE(client_shared_ptr); + namespace pybind11 { namespace detail { template @@ -97,6 +104,8 @@ extern Table udp_profile_imu_strings; extern Table shot_limiting_status_strings; extern Table thermal_shutdown_status_strings; +extern Table full_scale_range_strings; +extern Table return_order_strings; } // namespace impl } // namespace sensor @@ -414,6 +423,7 @@ PYBIND11_MODULE(_client, m) { .def_readonly("col_footer_size", &packet_format::col_footer_size) .def_readonly("col_size", &packet_format::col_size) .def_readonly("packet_footer_size", &packet_format::packet_footer_size) + .def_readonly("max_frame_id", &packet_format::max_frame_id) .def("field_value_mask", &packet_format::field_value_mask) .def("field_bitness", &packet_format::field_bitness) @@ -583,15 +593,15 @@ PYBIND11_MODULE(_client, m) { .def("set_field", set_field{}); m.def("scan_to_packets", - [](const LidarScan& ls, const packet_writer& pw) -> py::list { + [](const LidarScan& ls, const packet_writer& pw, uint32_t init_id, uint64_t prod_sn) -> py::list { py::list packets; py::object class_type = - py::module::import("ouster.client").attr("LidarPacket"); + py::module::import("ouster.sdk.client").attr("LidarPacket"); auto append_pypacket = [&](const LidarPacket& packet) { py::object pypacket = class_type(py::arg("packet_format") = pw); // next couple lines should not fail unless someone messes with - // ouster.client.LidarPacket implementation + // ouster.sdk.client.LidarPacket implementation LidarPacket* p_ptr = pypacket.cast(); if (p_ptr->buf.size() != packet.buf.size()) throw std::invalid_argument("packet sizes don't match"); @@ -601,7 +611,7 @@ PYBIND11_MODULE(_client, m) { }; auto iter = make_lambda_iter(append_pypacket); - impl::scan_to_packets(ls, pw, iter); + impl::scan_to_packets(ls, pw, iter, init_id, prod_sn); return packets; }); @@ -709,7 +719,7 @@ PYBIND11_MODULE(_client, m) { .def("__eq__", [](const sensor_info& i, const sensor_info& j) { return i == j; }) .def("__repr__", [](const sensor_info& self) { const auto mode = self.mode ? 
to_string(self.mode) : std::to_string(self.format.fps) + "fps"; - return ""; }) .def("__copy__", [](const sensor_info& self) { return sensor_info{self}; }) @@ -761,6 +771,18 @@ PYBIND11_MODULE(_client, m) { Applicable to several Polarity settings on sensor.)"); def_enum(Polarity, sensor::impl::polarity_strings, "POLARITY_"); + auto ReturnOrder = py::enum_(m, "ReturnOrder", R"( + Sensor return order. + + See sensor documentation for details.)"); + def_enum(ReturnOrder, sensor::impl::return_order_strings, "ORDER_"); + + auto FullScaleRange = py::enum_(m, "FullScaleRange", R"( + IMU output scale range. + + See sensor documentation for details.)"); + def_enum(FullScaleRange, sensor::impl::full_scale_range_strings, "FSR_"); + auto NMEABaudRate = py::enum_(m, "NMEABaudRate", R"( Expected baud rate sensor attempts to decode for NMEA UART input $GPRMC messages.)"); def_enum(NMEABaudRate, sensor::impl::nmea_baud_rate_strings); @@ -833,6 +855,10 @@ PYBIND11_MODULE(_client, m) { .def_readwrite("columns_per_packet", &sensor_config::columns_per_packet, "Measurement blocks per UDP packet. See sensor documentation for details.") .def_readwrite("udp_profile_lidar", &sensor_config::udp_profile_lidar, "UDP packet format for lidar data. See sensor documentation for details.") .def_readwrite("udp_profile_imu", &sensor_config::udp_profile_imu, "UDP packet format for imu data. See sensor documentation for details.") + .def_readwrite("gyro_fsr", &sensor_config::gyro_fsr, "The gyro full scale measurement range to use. See sensor documentation for details.") + .def_readwrite("accel_fsr", &sensor_config::accel_fsr, "The accelerometer full scale measurement range to use. See sensor documentation for details.") + .def_readwrite("return_order", &sensor_config::return_order, "The priority of sensor returns to output. See sensor documentation for details.") + .def_readwrite("min_range_threshold_cm", &sensor_config::min_range_threshold_cm, "The minimum detection range of the sensor in cm. 
See sensor documentation for details.") .def("__str__", [](const sensor_config& i) { return to_string(i); }) .def("__eq__", [](const sensor_config& i, const sensor_config& j) { return i == j; }) .def("__copy__", [](const sensor_config& self) { return sensor_config{self}; }) @@ -929,7 +955,7 @@ PYBIND11_MODULE(_client, m) { .def_readwrite("major", &util::version::major) .def_readwrite("minor", &util::version::minor) .def_readwrite("patch", &util::version::patch) - .def_static("from_string", &util::version_of_string); + .def_static("from_string", &util::version_from_string); m.attr("invalid_version") = util::invalid_version; @@ -937,6 +963,70 @@ PYBIND11_MODULE(_client, m) { // clang-format on + // client + py::class_(m, "SensorConnection") + .def(py::init([](std::string hostname, int lidar_port, + int imu_port) -> client_shared_ptr { + auto cli = sensor::init_client(hostname, lidar_port, imu_port); + if (!cli) + throw std::runtime_error( + "Failed initializing sensor connection"); + return cli; + }), + py::arg("hostname"), py::arg("lidar_port") = 7502, + py::arg("imu_port") = 7503) + .def(py::init([](std::string hostname, std::string udp_dest_host, + sensor::lidar_mode lp_mode, + sensor::timestamp_mode ts_mode, int lidar_port, + int imu_port, int timeout_sec, + bool persist_config) -> client_shared_ptr { + auto cli = sensor::init_client( + hostname, udp_dest_host, lp_mode, ts_mode, lidar_port, + imu_port, timeout_sec, persist_config); + if (!cli) + throw std::runtime_error( + "Failed initializing sensor connection"); + return cli; + }), + py::arg("hostname"), py::arg("udp_dest_host"), + py::arg("mode") = sensor::lidar_mode::MODE_1024x10, + py::arg("timestamp_mode") = + sensor::timestamp_mode::TIME_FROM_INTERNAL_OSC, + py::arg("lidar_port") = 0, py::arg("imu_port") = 0, + py::arg("timeout_sec") = 10, py::arg("persist_config") = false) + .def( + "poll", + [](const client_shared_ptr& self, + int timeout_sec) -> sensor::client_state { + return sensor::poll_client(*self, timeout_sec); + }, + py::arg("timeout_sec") = 1) + .def("read_lidar_packet", + [](const client_shared_ptr& self, LidarPacket& packet) -> bool { + return sensor::read_lidar_packet(*self, packet); + }) + .def("read_imu_packet", + [](const client_shared_ptr& self, ImuPacket& packet) -> bool { + return sensor::read_imu_packet(*self, packet); + }) + .def_property_readonly("lidar_port", + [](const client_shared_ptr& self) -> int { + return sensor::get_lidar_port(*self); + }) + .def_property_readonly("imu_port", + [](const client_shared_ptr& self) -> int { + return sensor::get_imu_port(*self); + }) + .def( + "get_metadata", + [](client_shared_ptr& self, int timeout_sec, + bool legacy_format) -> std::string { + return sensor::get_metadata(*self, timeout_sec, legacy_format); + }, + py::arg("timeout_sec") = DEFAULT_HTTP_REQUEST_TIMEOUT_SECONDS, + py::arg("legacy") = false) + .def("shutdown", [](client_shared_ptr& self) { self.reset(); }); + // Client Handle py::enum_(m, "ClientState", py::arithmetic()) .value("TIMEOUT", sensor::client_state::TIMEOUT) @@ -945,24 +1035,84 @@ PYBIND11_MODULE(_client, m) { .value("IMU_DATA", sensor::client_state::IMU_DATA) .value("EXIT", sensor::client_state::EXIT) // TODO: revisit including in C++ API - .value("OVERFLOW", - sensor::client_state(BufferedUDPSource::CLIENT_OVERFLOW)); + .value("OVERFLOW", sensor::client_state(Producer::CLIENT_OVERFLOW)); + + py::class_(m, "Event") + .def(py::init()) + .def_readwrite("source", &Event::source) + .def_readwrite("state", &Event::state); + + py::class_(m, 
"UDPPacketSource") + .def(py::init()) + .def( + "add_client", + [](UDPPacketSource& self, client_shared_ptr cli, + size_t lidar_buf_size, size_t lidar_packet_size, + size_t imu_buf_size, size_t imu_packet_size) { + self.add_client(cli, lidar_buf_size, lidar_packet_size, + imu_buf_size, imu_packet_size); + }, + py::arg("connection"), py::arg("lidar_buf_size"), + py::arg("lidar_packet_size"), py::arg("imu_buf_size"), + py::arg("imu_buf_size")) + .def( + "add_client", + [](UDPPacketSource& self, client_shared_ptr cli, + const sensor_info& info, float seconds_to_buffer) { + self.add_client(cli, info, seconds_to_buffer); + }, + py::arg("connection"), py::arg("metadata"), + py::arg("seconds_to_buffer")) + .def("shutdown", [](UDPPacketSource& self) { self.shutdown(); }) + // clang-format off + .def_property_readonly("size", [](const UDPPacketSource& self) { + return self.size(); + }) + // clang-format on + .def_property_readonly( + "capacity", + [](const UDPPacketSource& self) { return self.capacity(); }) + .def("produce", + [](UDPPacketSource& self) { + py::gil_scoped_release release; + self.produce(); + }) + .def("pop", + [](UDPPacketSource& self, float timeout_sec) -> Event { + py::gil_scoped_release release; + return self.pop(timeout_sec); + }) + .def( + "packet", + [](UDPPacketSource& self, Event e) -> Packet& { + return self.packet(e); + }, + py::return_value_policy::reference) + .def("advance", [](UDPPacketSource& self, Event e) { self.advance(e); }) + .def("flush", [](UDPPacketSource& self) { self.flush(); }); py::class_(m, "Client") - .def(py::init(), py::arg("hostname"), - py::arg("lidar_port"), py::arg("imu_port"), - py::arg("capacity") = 128) - .def(py::init(), - py::arg("hostname"), py::arg("udp_dest_host"), - py::arg("mode") = sensor::lidar_mode::MODE_1024x10, - py::arg("timestamp_mode") = - sensor::timestamp_mode::TIME_FROM_INTERNAL_OSC, - py::arg("lidar_port") = 0, py::arg("imu_port") = 0, - py::arg("timeout_sec") = 10, py::arg("capacity") = 128) - .def("get_metadata", &BufferedUDPSource::get_metadata, - py::arg("timeout_sec") = 10, py::arg("legacy") = true) - .def("shutdown", &BufferedUDPSource::shutdown) + .def(py::init(), + py::arg("connection"), py::arg("lidar_buf_size"), + py::arg("lidar_packet_size"), py::arg("imu_buf_size"), + py::arg("imu_buf_size")) + .def(py::init(), + py::arg("connection"), py::arg("metadata"), + py::arg("seconds_to_buffer")) + .def("shutdown", [](BufferedUDPSource& self) { self.shutdown(); }) + .def("pop", + [](BufferedUDPSource& self, + float timeout_sec) -> sensor::client_state { + py::gil_scoped_release release; + return self.pop(timeout_sec); + }) + .def( + "packet", + [](BufferedUDPSource& self, sensor::client_state st) -> Packet& { + return self.packet(st); + }, + py::return_value_policy::reference) + .def("advance", &BufferedUDPSource::advance) .def("consume", [](BufferedUDPSource& self, LidarPacket& lp, ImuPacket& ip, float timeout_sec) { @@ -992,15 +1142,19 @@ PYBIND11_MODULE(_client, m) { return res; }) .def("produce", - [](BufferedUDPSource& self, const packet_format& pf) { + [](BufferedUDPSource& self) { py::gil_scoped_release release; - self.produce(pf); + self.produce(); }) - .def("flush", &BufferedUDPSource::flush, py::arg("n_packets") = 0) - .def_property_readonly("capacity", &BufferedUDPSource::capacity) - .def_property_readonly("size", &BufferedUDPSource::size) - .def_property_readonly("lidar_port", &BufferedUDPSource::get_lidar_port) - .def_property_readonly("imu_port", &BufferedUDPSource::get_imu_port); + .def("flush", 
[](BufferedUDPSource& self) { self.flush(); }) + // clang-format off + .def_property_readonly("size", [](const BufferedUDPSource& self) { + return self.size(); + }) + // clang-format on + .def_property_readonly("capacity", [](const BufferedUDPSource& self) { + return self.capacity(); + }); // Scans py::class_(m, "LidarScan", R"( @@ -1049,7 +1203,7 @@ PYBIND11_MODULE(_client, m) { New LidarScan of specified dimensions expecting fields of specified profile )", - py::arg("w"), py::arg("h"), py::arg("profile"), + py::arg("h"), py::arg("w"), py::arg("profile"), py::arg("columns_per_packet") = DEFAULT_COLUMNS_PER_PACKET) .def( "__init__", @@ -1078,6 +1232,48 @@ PYBIND11_MODULE(_client, m) { )", py::arg("w"), py::arg("h"), py::arg("field_types"), py::arg("columns_per_packet") = DEFAULT_COLUMNS_PER_PACKET) + .def( + "__init__", + [](LidarScan& self, const LidarScan& source, + const std::map& field_types) { + LidarScanFieldTypes ft{}; + for (const auto& f : field_types) { + auto dtype = py::dtype::from_args(f.second); + ft.push_back( + std::make_pair(f.first, field_type_of_dtype(dtype))); + } + new (&self) LidarScan(source, ft); + }, + R"( + Initialize a lidar scan from another with only the indicated fields. + Casts, zero pads or removes fields from the original scan if necessary. + + Args: + source: LidarScan to copy data from + fields_dict: dict of fields to have in the new scan where keys are ChanFields + and values are type, e.g., {client.ChanField.SIGNAL: np.uint32} + + Returns: + New LidarScan with selected data copied over or zero padded + + )", + py::arg("source"), py::arg("field_types")) + .def( + "__init__", + [](LidarScan& self, const LidarScan& source) { + new (&self) LidarScan(source); + }, + R"( + Initialize a lidar scan with a copy of the data from another. + + Args: + source: LidarScan to copy + + Returns: + New LidarScan with data copied over from provided scan. + + )", + py::arg("source")) .def_readonly("w", &LidarScan::w, "Width or horizontal resolution of the scan.") .def_readonly("h", &LidarScan::h, @@ -1188,15 +1384,11 @@ PYBIND11_MODULE(_client, m) { .def("__repr__", [](const LidarScan& self) { std::stringstream ss; - ss << ""; return ss.str(); }) - .def("__str__", [](const LidarScan& self) { return to_string(self); }) - // for backwards compatibility: previously converted between Python - // / native representations, now a noop - .def("to_native", [](py::object& self) { return self; }) - .def_static("from_native", [](py::object& scan) { return scan; }); + .def("__str__", [](const LidarScan& self) { return to_string(self); }); // Destagger overloads for most numpy scalar types m.def("destagger_int8", &ouster::destagger); @@ -1346,7 +1538,6 @@ PYBIND11_MODULE(_client, m) { )", py::arg("udp_profile_lidar")); - using ouster::sensor::Packet; py::class_(m, "_Packet") .def(py::init(), py::arg("size") = 65536) // direct access to timestamp field diff --git a/python/src/cpp/_osf.cpp b/python/src/cpp/_osf.cpp index c33c0017..1b8c6b3f 100644 --- a/python/src/cpp/_osf.cpp +++ b/python/src/cpp/_osf.cpp @@ -100,25 +100,41 @@ to work with OSF files. )doc", py::arg("file"), py::arg("with_decoding") = false); - m.def("pcap_to_osf", &ouster::osf::pcap_to_osf, R"doc( - Convert Pcap file to OSF v2. 
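A minimal sketch of the two ``LidarScan`` constructors documented above (a plain copy, and a copy restricted to a field/type mapping), assuming the ``ouster.sdk.client`` import path used throughout this changeset::

    import numpy as np
    from ouster.sdk import client

    source = client.LidarScan(128, 1024)  # h, w

    # Keep only RANGE and REFLECTIVITY, casting or zero padding as needed.
    reduced = client.LidarScan(source, {client.ChanField.RANGE: np.uint32,
                                        client.ChanField.REFLECTIVITY: np.uint8})

    # Plain deep copy of all fields.
    duplicate = client.LidarScan(source)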
- - :pcap_filename: Pcap file path to be converted - :meta_filename: metadata for recorded sensor data - :lidar_port: destination port for lidar_packets - :osf_filename: OSF output file - :returns: True on success, False on error + m.def("backup_osf_file_metablob", &ouster::osf::backup_osf_file_metablob, + R"doc( + Backup the metadata blob in an OSF file. + + :file: OSF file path (v1/v2) + :backup_file_name: Backup path + )doc", + py::arg("file"), py::arg("backup_file_name")); + + m.def("restore_osf_file_metablob", &ouster::osf::restore_osf_file_metablob, + R"doc( + Restore an OSF metadata blob from a backup file. + + :file: OSF file path (v1/v2) + :backup_file_name: The backup to use + )doc", + py::arg("file"), py::arg("backup_file_name")); + + m.def("osf_file_modify_metadata", &ouster::osf::osf_file_modify_metadata, + R"doc( + Modify an OSF files sensor_info metadata. + + :file_name: The OSF file to modify. + :new_metadata: Array containing sensor infos to write to the file. + :returns: The number of the bytes written to the OSF file. )doc", - py::arg("file"), py::arg("meta"), py::arg("lidar_port"), - py::arg("osf_filename"), py::arg("chunk_size") = 0); + py::arg("file_name"), py::arg("new_metadata")); // Reader py::class_(m, "Reader", R"( Reader is a main entry point to get any info out of OSF file. )") .def(py::init(), py::arg("file")) - .def_property_readonly("id", &osf::Reader::id, R"( - Data id + .def_property_readonly("metadata_id", &osf::Reader::metadata_id, R"( + Data id string )") .def_property_readonly( "start_ts", @@ -206,18 +222,6 @@ to work with OSF files. Requires the OSF with message_counts inside, i.e. has_message_idx() is ``True``, otherwise return value is always None. )") - .def( - "messages_standard", - [](osf::Reader& r) { - return py::make_iterator(r.messages_standard().begin(), - r.messages_standard().end()); - }, - py::keep_alive<0, 1>(), R"( - Creates an iterator to reads messages in ``STANDARD`` layout. - - ``STANDARD`` layout order is how messages stored physically in - the chunks. - )") .def( "chunks", [](osf::Reader& r) { @@ -541,52 +545,7 @@ to work with OSF files. }) .def_property_readonly("meta", &osf::LidarScanStream::meta, "`metadata entry` to store `LidarScanStream` " - "metadata in an OSF file") - .def( - py::init( - [](osf::Writer* writer, uint32_t sensor_meta_id, - const std::map& field_types) { - LidarScanFieldTypes ft{}; - for (const auto& f : field_types) { - auto dtype = py::dtype::from_args(f.second); - ft.push_back(std::make_pair( - f.first, field_type_of_dtype(dtype))); - } - return new osf::LidarScanStream(*writer, sensor_meta_id, - ft); - }), - py::arg("writer"), py::arg("sensor_meta_id"), - py::arg("field_types") = std::map{}, - py::keep_alive<1, 2>(), // ties Writer to the constructed - // LidarScanStream - R"( - Creates `LidarScanStream` for a ``writer`` - - Args: - writer (Writer): writes stream to the specified ``writer`` - sensor_meta_id (int): id of the metadata entry, that points - to ``LidarSensor`` with sensor intrinsics. - field_types (dict): set of fields to use from the - ``LidarScan``, it's used to create horizontal slices of - ``LidarScana`` (i.e. write only RANGE and SIGNAL fields) - )") - .def( - "log", - [](osf::LidarScanStream&, uint64_t, const LidarScan&) { - std::cerr << "LidarScanStream.log() method is removed, use " - "LidarScanStream.save() instead." 
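The ``backup_osf_file_metablob`` and ``osf_file_modify_metadata`` bindings introduced in this hunk rewrite the sensor_info stored in an existing OSF file in place. A rough sketch, assuming these functions are exposed through the low-level ``ouster.sdk.osf._osf`` extension module and that the file names are placeholders::

    from ouster.sdk import client
    from ouster.sdk.osf import _osf

    # Keep a copy of the original metadata blob before rewriting it.
    _osf.backup_osf_file_metablob("recording.osf", "recording_meta.bak")

    with open("updated_metadata.json") as f:
        new_info = client.SensorInfo(f.read())

    # One sensor_info per lidar stream in the file; returns the bytes written.
    bytes_written = _osf.osf_file_modify_metadata("recording.osf", [new_info])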
- << std::endl; - std::abort(); - }, - "DEPRECATED and REMOVED") - .def( - "save", - [](osf::LidarScanStream& stream, uint64_t ts, const LidarScan& ls) { - stream.save(osf::ts_t{ts}, ls); - }, - py::arg("ts"), py::arg("ls"), - "Writes `LidarScan` (``ls``) object to a `stream`. (i.e. saves it " - "to an OSF file)"); + "metadata in an OSF file"); // StreamStats py::class_(m, "StreamStats", R"( @@ -683,29 +642,163 @@ to work with OSF files. and stream interfaces that encodes messages and passes them to internal chunks writer. )") - .def(py::init(), py::arg("file_name"), R"( + .def(py::init(), py::arg("file_name"), + py::arg("chunk_size") = 0, R"( + Creates a `Writer` with specified ``chunk_size``. + + Default ``chunk_size`` is ``2 MB``. + )") + .def( + py::init( + [](const std::string& filename, const sensor::sensor_info& info, + const std::map& field_types, + uint32_t chunk_size) { + LidarScanFieldTypes ft{}; + for (const auto& f : field_types) { + auto dtype = py::dtype::from_args(f.second); + ft.push_back(std::make_pair( + f.first, field_type_of_dtype(dtype))); + } + return new osf::Writer(filename, info, ft, chunk_size); + }), + py::arg("filename"), py::arg("info"), + py::arg("field_types") = std::map{}, + py::arg("chunk_size") = 0, + R"( Creates a `Writer` with deafault ``STREAMING`` layout chunks writer. - Using default ``chunk_size`` of ``5MB``. + Using default ``chunk_size`` of ``2MB``. + + Args: + filename (str): The filename to output to. + info (sensor_info): The sensor info vector to use for a multi stream OSF + file. + chunk_size (int): The chunk size in bytes to use for the OSF file. This arg + is optional, and if not provided the default value of 2MB + is used. If the current chunk being written exceeds the + chunk_size, a new chunk will be started on the next call to + `save`. This allows an application to tune the number of + messages (e.g. lidar scans) per chunk, which affects the + granularity of the message index stored in the + StreamingInfo in the file metadata. A smaller chunk_size + means more messages are indexed and a larger number of + index entries. A more granular index allows for more + precise seeking at the slight expense of a larger file. + field_types (Dict[ChanField, FieldDType]): The fields from scans to + actually save into the OSF. If not provided uses the fields from + the first saved lidar scan for each stream. This parameter is optional. + )") - .def(py::init(), - py::arg("file_name"), py::arg("metadata_id"), - py::arg("chunk_size") = 0, R"( - Creates a `Writer` with specified ``chunk_size``. + .def(py::init( + [](const std::string& filename, + const std::vector& info, + const std::map& field_types, + uint32_t chunk_size) { + LidarScanFieldTypes ft{}; + for (const auto& f : field_types) { + auto dtype = py::dtype::from_args(f.second); + ft.push_back(std::make_pair( + f.first, field_type_of_dtype(dtype))); + } + return new osf::Writer(filename, info, ft, chunk_size); + }), + py::arg("filename"), py::arg("info"), + py::arg("field_types") = std::map{}, + py::arg("chunk_size") = 0, + R"( + Creates a `Writer` with specified ``chunk_size``. + + Default ``chunk_size`` is ``2MB``. + + Args: + filename (str): The filename to output to. + info (List[sensor_info]): The sensor info vector to use for a + multi stream OSF file. + field_types (Dict[ChanField, FieldDType]): The fields from scans to + actually save into the OSF. If not provided uses the fields from + the first saved lidar scan for each stream. This parameter is optional. 
+ chunk_size (int): The chunksize to use for the OSF file, this arg + is optional. - Default ``chunk_size`` is ``5 MB``. )") - .def_property_readonly("filename", &osf::Writer::filename, - "OSF file name where data is written to") .def( - "addMetadata", + "save", + [](osf::Writer& writer, uint32_t stream_index, + const LidarScan& scan) { writer.save(stream_index, scan); }, + py::arg("stream_index"), py::arg("scan"), + R"( + Save a lidar scan to the OSF file. + + Args: + stream_index (int): The index of the corrosponding + sensor_info to use. + scan (LidarScan): The scan to save. + + )") + .def( + "save", + [](osf::Writer& writer, uint32_t stream_index, + const LidarScan& scan, uint64_t ts) { + writer.save(stream_index, scan, ouster::osf::ts_t(ts)); + }, + py::arg("stream_index"), py::arg("scan"), py::arg("ts"), + R"( + Save a lidar scan to the OSF file. + + Args: + stream_index (int): The index of the corresponding + sensor_info to use. + scan (LidarScan): The scan to save. + ts (int): The timestamp to index the scan with. + )") + .def( + "save", + [](osf::Writer& writer, const std::vector& scans) { + writer.save(scans); + }, + py::arg("scan"), + R"( + Save a set of lidar scans to the OSF file. + + Args: + scans (List[LidarScan]): The scans to save. This will correspond + to the list of sensor_infos. + + )") + .def( + "set_metadata_id", + [](osf::Writer& writer, const std::string& str) { + return writer.set_metadata_id(str); + }, + R"( + Set the metadata identifier string. + )") + .def( + "metadata_id", + [](osf::Writer& writer) { return writer.metadata_id(); }, + R"( + Return the metadata identifier string. + + Returns (str): + The OSF metadata identifier string. + )") + .def( + "filename", [](osf::Writer& writer) { return writer.filename(); }, + R"( + Return the osf file name. + + Returns (str): + The OSF filename. + )") + .def( + "add_metadata", [](osf::Writer& writer, py::object m) { uint32_t res = 0; if (py::hasattr(m, "type_id")) { std::string type_id = py::cast(py::getattr(m, "type_id")); osf::MetadataEntry* me = m.cast(); - res = writer.addMetadata(*me); + res = writer.add_metadata(*me); } return res; }, @@ -715,10 +808,10 @@ to work with OSF files. *metadata entries* in the file. )") .def( - "saveMessage", + "save_message", [](osf::Writer& writer, uint32_t stream_id, uint64_t ts, py::array_t& buf) { - writer.saveMessage(stream_id, osf::ts_t{ts}, getvector(buf)); + writer.save_message(stream_id, osf::ts_t{ts}, getvector(buf)); }, py::arg("stream_id"), py::arg("ts"), py::arg("buffer"), R"( Low-level save message routine. @@ -727,10 +820,10 @@ to work with OSF files. without any further checks. )") .def( - "saveMessage", + "save_message", [](osf::Writer& writer, uint32_t stream_id, uint64_t ts, py::buffer& buf) { - writer.saveMessage(stream_id, osf::ts_t{ts}, getvector(buf)); + writer.save_message(stream_id, osf::ts_t{ts}, getvector(buf)); }, py::arg("stream_id"), py::arg("ts"), py::arg("buffer"), R"( Low-level save message routine. @@ -738,8 +831,126 @@ to work with OSF files. Directly saves the message `buffer` with `id` and `ts` (ns) without any further checks. 
)") + .def( + "add_sensor", + [](osf::Writer& writer, const sensor::sensor_info& info, + const std::map& field_types) { + LidarScanFieldTypes ft{}; + for (const auto& f : field_types) { + auto dtype = py::dtype::from_args(f.second); + ft.push_back( + std::make_pair(f.first, field_type_of_dtype(dtype))); + } + return writer.add_sensor(info, ft); + }, + py::arg("info"), + py::arg("field_types") = std::map{}, + R"( + Add a sensor to the OSF file. + + Args: + info (sensor_info): Sensor to add. + field_types (Dict[ChanField, FieldDType]): The fields from scans to + actually save into the OSF. If not provided uses the fields from + the first saved lidar scan for each stream. This parameter is optional. + + Returns (int): + The stream index to use to write scans to this sensor. + + )") .def("close", &osf::Writer::close, - "Finish OSF file and flush everything on disk."); + "Finish OSF file and flush everything on disk.") + .def( + "is_closed", [](osf::Writer& writer) { return writer.is_closed(); }, + R"( + Return the closed status of the writer. + + Returns (bool): + The closed status of the writer. + + )") + .def( + "save", + [](osf::Writer& writer, uint32_t stream_index, + const LidarScan& scan) { writer.save(stream_index, scan); }, + py::arg("stream_index"), py::arg("scan"), + R"( + Save a lidar scan to the OSF file. + + Args: + stream_index (int): The index of the corrosponding + sensor_info to use. + scan (LidarScan): The scan to save. + + )") + .def( + "save", + [](osf::Writer& writer, const std::vector& scans) { + writer.save(scans); + }, + py::arg("scan"), + R"( + Save a set of lidar scans to the OSF file. + + Args: + scans (List[LidarScan]): The scans to save. This will correspond + to the list of sensor_infos. + + )") + .def( + "sensor_info", + [](osf::Writer& writer) { return writer.sensor_info(); }, + R"( + Return the sensor info list. + + Returns (List[sensor_info]): + The sensor info list. + + )") + .def( + "sensor_info", + [](osf::Writer& writer, uint32_t stream_index) { + return writer.sensor_info(stream_index); + }, + py::arg("stream_index"), + R"( + Return the sensor info of the specifed stream_index. + + Args: + stream_index (in): The index of the sensor to return + info about. + + Returns (sensor_info): + The correct sensor info + + )") + .def( + "sensor_info_count", + [](osf::Writer& writer) { return writer.sensor_info_count(); }, + R"( + Return the number of sensor_info objects. + + Returns (int): + The number of sensor_info objects. + + )") + .def( + "__enter__", [](osf::Writer* writer) { return writer; }, + R"( + Allow Writer to work within `with` blocks. + )") + .def( + "__exit__", + [](osf::Writer& writer, pybind11::object& /*exc_type*/, + pybind11::object& /*exc_value*/, + pybind11::object& /*traceback*/) { + writer.close(); + + return py::none(); + }, + R"( + Allow Writer to work within `with` blocks. + )"); m.def("slice_and_cast", &ouster::osf::slice_with_cast, py::arg("lidar_scan"), py::arg("field_types"), diff --git a/python/src/cpp/_pcap.cpp b/python/src/cpp/_pcap.cpp index 20b777e6..707f9e9b 100644 --- a/python/src/cpp/_pcap.cpp +++ b/python/src/cpp/_pcap.cpp @@ -230,10 +230,15 @@ This module is generated from the C++ code and not meant to be used directly. 
.def(py::init()) .def("frame_count", &PcapIndex::frame_count) .def("seek_to_frame", &PcapIndex::seek_to_frame) - .def_readonly("frame_indices", &PcapIndex::frame_indices_); + .def_readonly("frame_indices", &PcapIndex::frame_indices_) + .def_readonly("frame_timestamp_indices", + &PcapIndex::frame_timestamp_indices_) + .def_readonly("frame_id_indices", &PcapIndex::frame_id_indices_); py::class_(m, "IndexedPcapReader") .def(py::init&>()) + .def(py::init&>()) .def("current_info", &IndexedPcapReader::current_info) .def("next_packet", &IndexedPcapReader::next_packet) .def("update_index_for_current_packet", @@ -247,6 +252,9 @@ This module is generated from the C++ code and not meant to be used directly. }) .def("reset", &IndexedPcapReader::reset) // TODO move to PcapReader binding? + .def("seek", + &IndexedPcapReader::seek) // TODO move to PcapReader binding? + .def("build_index", &IndexedPcapReader::build_index) .def("get_index", &IndexedPcapReader::get_index) .def("current_data", [](IndexedPcapReader& reader) -> py::array { uint8_t* data = const_cast(reader.current_data()); diff --git a/python/src/cpp/_viz.cpp b/python/src/cpp/_viz.cpp index 14b74b00..95926773 100644 --- a/python/src/cpp/_viz.cpp +++ b/python/src/cpp/_viz.cpp @@ -126,6 +126,14 @@ PYBIND11_MODULE(_viz, m) { .def("visible", &viz::PointViz::visible, "Toggle if the PointViz window is visible") + .def("update_on_input", + py::overload_cast<>(&viz::PointViz::update_on_input), + "Will we update on user input.") + + .def("update_on_input", + py::overload_cast(&viz::PointViz::update_on_input), + "Set update_on_input state.") + // misc .def( "push_key_handler", @@ -564,7 +572,8 @@ PYBIND11_MODULE(_viz, m) { [](viz::Cloud& self, py::dict) { return viz::Cloud{self}; }) .def("__repr__", [](const viz::Cloud& self) { std::stringstream ss; - ss << ""; return ss.str(); }); @@ -827,18 +836,30 @@ PYBIND11_MODULE(_viz, m) { m.attr("spezia_palette") = py::array_t{ {static_cast(viz::spezia_n), static_cast(3)}, &viz::spezia_palette[0][0]}; + m.attr("spezia_cal_ref_palette") = py::array_t{ + {static_cast(viz::spezia_cal_ref_n), static_cast(3)}, + &viz::spezia_cal_ref_palette[0][0]}; m.attr("calref_palette") = py::array_t{ {static_cast(viz::calref_n), static_cast(3)}, &viz::calref_palette[0][0]}; m.attr("grey_palette") = py::array_t{ {static_cast(viz::grey_n), static_cast(3)}, &viz::grey_palette[0][0]}; + m.attr("grey_cal_ref_palette") = py::array_t{ + {static_cast(viz::grey_cal_ref_n), static_cast(3)}, + &viz::grey_cal_ref_palette[0][0]}; m.attr("viridis_palette") = py::array_t{ {static_cast(viz::viridis_n), static_cast(3)}, &viz::viridis_palette[0][0]}; + m.attr("viridis_cal_ref_palette") = py::array_t{ + {static_cast(viz::viridis_cal_ref_n), static_cast(3)}, + &viz::viridis_cal_ref_palette[0][0]}; m.attr("magma_palette") = py::array_t{ {static_cast(viz::magma_n), static_cast(3)}, &viz::magma_palette[0][0]}; + m.attr("magma_cal_ref_palette") = py::array_t{ + {static_cast(viz::magma_cal_ref_n), static_cast(3)}, + &viz::magma_cal_ref_palette[0][0]}; m.attr("__version__") = ouster::SDK_VERSION; } diff --git a/python/src/ouster/cli/core/__init__.py b/python/src/ouster/cli/core/__init__.py index ed08bd9b..007024ca 100644 --- a/python/src/ouster/cli/core/__init__.py +++ b/python/src/ouster/cli/core/__init__.py @@ -14,13 +14,9 @@ from typing import Optional, List, Mapping -from ouster.client import ClientError, init_logger +from ouster.sdk.client import ClientError, init_logger -from .pcap import pcap_group -from .sensor import sensor_group from .util 
import util_group -from .osf import osf_group - this_package_name = 'ouster-sdk' APP_NAME = 'ouster' @@ -106,18 +102,9 @@ def cli(ctx, trace: bool, sdk_log_level: Optional[str]) -> None: init_logger(ctx.obj['SDK_LOG_LEVEL']) -# pcap commands -cli.add_command(pcap_group) - -# sensor commands -cli.add_command(sensor_group) - # util commands cli.add_command(util_group) -# osf commands -cli.add_command(osf_group) - # from https://github.com/python/importlib_metadata, Apache 2.0 license def packages_distributions() -> Mapping[str, List[str]]: @@ -223,8 +210,9 @@ def run(args=None) -> None: logger.debug(platform.python_version() + " : " + " ".join(sys.argv)) try: + exit_code = 0 find_plugins(TRACEBACK_FLAG in sys.argv) - exit_code = cli.main(args=args, standalone_mode=False) + cli.main(args=args, standalone_mode=False) except click.Abort: print('Aborted!') logger.debug('Aborted!') diff --git a/python/src/ouster/cli/core/osf.py b/python/src/ouster/cli/core/osf.py deleted file mode 100644 index 81e362de..00000000 --- a/python/src/ouster/cli/core/osf.py +++ /dev/null @@ -1,310 +0,0 @@ -import click - -from typing import Iterator, Dict, cast, Optional, List, Union -import numpy as np -import logging - - -HAS_MULTI = False -try: - from ouster.sdkx.multi import collate_scans # type: ignore - from ouster.sdkx.osf.multi import ScansMultiReader # type: ignore - from ouster.sdkx.multi_viz import MultiLidarScanViz # type: ignore - HAS_MULTI = True -except ImportError as e: - logging.debug(e) - - -@click.group(name="osf", hidden=True) -@click.pass_context -def osf_group(ctx) -> None: - """Commands for working with OSF files and converting data to OSF.""" - try: - from ouster.osf import _osf - except ImportError as e: - raise click.ClickException("Error: " + str(e)) - ctx.ensure_object(dict) - sdk_log_level = ctx.obj.get('SDK_LOG_LEVEL', None) - if sdk_log_level: - _osf.init_logger(sdk_log_level) - - -@osf_group.command(name='info') # type: ignore -@click.argument('file', required=True, type=click.Path(exists=True)) -@click.option('-s', '--short', is_flag=True, help='Print less metadata info') -@click.pass_context -def osf_info(ctx, file: str, short: bool) -> None: - """Print information about an OSF file to stdout. - - Parses all metadata entries, output is in JSON format. - """ - try: - from ouster.osf import _osf - except ImportError as e: - raise click.ClickException("Error: " + str(e)) - - if not ctx.obj.get('SDK_LOG_LEVEL', None): - # If not SDK_LOG_LEVEL passed we set to "error" logging so to ensure - # that json output is not interferred with other SDK logging messages - # and thus ruining valid json structure - _osf.init_logger("error") - - print(_osf.dump_metadata(file, not short)) - - -@osf_group.command(name='parse') # type: ignore -@click.argument('file', - required=True, - type=click.Path(exists=True, dir_okay=False)) -@click.option('-d', '--decode', is_flag=True, help="Decode messages") -@click.option('-v', - '--verbose', - is_flag=True, - help="Verbose LidarScan outputs (only when used with -d option)") -@click.option('-r', - '--check-raw-headers', - is_flag=True, - help="Check RAW_HEADERS fields by reconstructing lidar_packets" - " and batching LidarScan back (without fields data) and compare." 
- "(applies only when used with -d option)") -@click.option('-s', - '--standard', - is_flag=True, - help="Show standard layout with chunks") -def osf_parse(file: str, decode: bool, verbose: bool, check_raw_headers: bool, - standard: bool) -> None: - """ - Read an OSF file and print messages type, timestamp and counts to stdout. - Useful to check chunks layout and decoding of all known messages (-d option). - """ - try: - from ouster import client - import ouster.osf as osf - except ImportError as e: - raise click.ClickException("Error: " + str(e)) - - # NOTE[pb]: Mypy quirks or some of our Python packages structure quirks, idk :( - from ouster.client._client import get_field_types - from ouster.sdkx.parsing import scan_to_packets, packets_to_scan, cut_raw32_words # type: ignore - - reader = osf.Reader(file) - - orig_layout = "STREAMING" if reader.has_stream_info else "STANDARD" - - print(f"filename: {file}, layout: {orig_layout}") - - # map stream_id to metadata entry - scan_stream_sensor: Dict[int, osf.LidarSensor] - scan_stream_sensor = {} - for scan_stream_id, scan_stream_meta in reader.meta_store.find( - osf.LidarScanStream).items(): - scan_stream_sensor[scan_stream_id] = reader.meta_store[ - scan_stream_meta.sensor_meta_id] - - ls_cnt = 0 - other_cnt = 0 - - def proc_msgs(msgs: Iterator[osf.MessageRef]): - nonlocal ls_cnt, other_cnt, decode - for m in msgs: - if m.of(osf.LidarScanStream): - prefix = "Ls" - ls_cnt += 1 - else: - prefix = "UN" - other_cnt += 1 - d = "" - verbose_str = "" - if decode: - obj = m.decode() - d = "[D]" if obj else "" - if m.of(osf.LidarScanStream): - ls = cast(client.LidarScan, obj) - - d = d + (" [poses: YES]" if client.poses_present(ls) else "") - - if verbose: - verbose_str += f"{ls}" - - if check_raw_headers: - d = d + " " if d else "" - if client.ChanField.RAW_HEADERS in ls.fields: - sinfo = scan_stream_sensor[m.id].info - - # roundtrip: LidarScan -> packets -> LidarScan - packets = scan_to_packets(ls, sinfo) - - # recovered lidar scan - field_types = get_field_types(ls) - ls_rec = packets_to_scan(packets, sinfo, fields=field_types) - - ls_no_raw32 = cut_raw32_words(ls) - ls_rec_no_raw32 = cut_raw32_words(ls_rec) - - assert ls_rec_no_raw32 == ls_no_raw32, "LidarScan should be" \ - " equal when recontructed from RAW_HEADERS fields" \ - " packets back" - - d += "[RAW_HEADERS: OK]" - else: - d += "[RAW_HEADERS: NONE]" - - print(f" {prefix}\tts: {m.ts}\t\tstream_id: {m.id}\t{d}") - if verbose_str: - print(60 * '-') - print(f"{verbose_str}") - print(60 * '-') - - if not standard and reader.has_stream_info: - proc_layout = "STREAMING" - proc_msgs(reader.messages()) - else: - proc_layout = "STANDARD" - for chunk in reader.chunks(): - print(f"Chunk [{chunk.offset}\t\t]: start_ts = {chunk.start_ts}, " - f"end_ts = {chunk.end_ts}") - proc_msgs(iter(chunk)) - - showed_as_str = "" - if orig_layout != proc_layout: - showed_as_str = f"showed as: {proc_layout}" - - print() - print(f"SUMMARY: [layout: {orig_layout}] {showed_as_str}") - print(f" lidar_scan (Ls) count = {ls_cnt}") - print(f" other count = {other_cnt}") - - -@osf_group.command(name="viz") -@click.argument("file", - required=True, - type=click.Path(exists=True, dir_okay=False)) -@click.option('-e', - '--on-eof', - default='loop', - type=click.Choice(['loop', 'stop', 'exit']), - help="Loop, stop, or exit after reaching end of file") -@click.option("-p", "--pause", is_flag=True, help="Pause at first lidar scan") -@click.option("--pause-at", - default=-1, - help="Lidar Scan number to pause") 
-@click.option("-r", "--rate", default=1.0, help="Playback rate") -@click.option("--extrinsics", - type=float, - required=False, - nargs=16, - help="Lidar sensor extrinsics to use in viz (instead possible " - " extrinsics stored in OSF)") -@click.option("--skip-extrinsics", - is_flag=True, - help="Don't use any extrinsics (leaves them at Identity)") -@click.option("-s", - "--start-ts", - type=int, - required=False, - default=0, - help="Viz from the provided start timestamp (nanosecs)") -@click.option("--sensor-id", - type=int, - required=False, - default=0, - help="Viz only the single sensor by sensor_id") -@click.option("--multi", - is_flag=True, - help="Use multi sensor viz") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -def osf_viz(file: str, on_eof: str, pause: bool, pause_at: int, rate: float, - extrinsics: Optional[List[float]], skip_extrinsics: bool, - start_ts: int, sensor_id: int, multi: bool, accum_num: int, - accum_every: Optional[int], accum_every_m: Optional[float], - accum_map: bool, accum_map_ratio: float) -> None: - """Visualize Lidar Scan Data from an OSF file. - - Only one LidarScan stream will be shown, unless ``--multi`` is set. - """ - try: - import ouster.osf as osf - from ouster.viz import SimpleViz, LidarScanViz, scans_accum_for_cli - except ImportError as e: - raise click.ClickException(str(e)) - - if pause and pause_at == -1: - pause_at = 0 - - if rate not in SimpleViz._playback_rates: - raise click.ClickException("Invalid rate specified") - - if not HAS_MULTI and multi: - raise click.ClickException("--multi is not supported in this version.") - - # TODO[pb]: Switch to aligned Protocol/Interfaces that we - # should get after some refactoring/designing - scans_source: Union[osf.Scans, ScansMultiReader] - ls_viz: Union[LidarScanViz, MultiLidarScanViz] - - if not multi: - scans_source = osf.Scans(file, - cycle=(on_eof == 'loop'), - start_ts=start_ts, - sensor_id=sensor_id) - - # overwrite extrinsics of a sensor stored in OSF if --extrinsics arg is - # provided - if extrinsics and not skip_extrinsics: - scans_source.metadata.extrinsic = np.array(extrinsics).reshape((4, 4)) - print(f"Overwriting sensor extrinsics to:\n" - f"{scans_source.metadata.extrinsic}") - - if skip_extrinsics: - scans_source.metadata.extrinsic = np.eye(4) - print(f"Setting all sensor extrinsics to " - f"Identity:\n{scans_source.metadata.extrinsic}") - - ls_viz = LidarScanViz(scans_source.metadata) - - scans = scans_source - - elif HAS_MULTI: - # Multi sensor viz - reader = osf.Reader(file) - scans_source = ScansMultiReader(reader, - cycle=(on_eof == 'loop'), - start_ts=start_ts) - - for idx, (sid, _) in enumerate(scans_source._sensors): - scans_source .metadata[idx].hostname = f"sensorid: {sid}" - - ls_viz = MultiLidarScanViz(scans_source.metadata, source_name=file) - - scans = collate_scans(scans_source) # type: ignore - - scans_accum = scans_accum_for_cli(scans_source.metadata, - accum_num=accum_num, - accum_every=accum_every, - accum_every_m=accum_every_m, - accum_map=accum_map, - accum_map_ratio=accum_map_ratio) - - 
SimpleViz(ls_viz, - rate=rate, - pause_at=pause_at, - on_eof=on_eof, - scans_accum=scans_accum).run(scans) - - click.echo("Done") diff --git a/python/src/ouster/cli/core/pcap.py b/python/src/ouster/cli/core/pcap.py deleted file mode 100644 index 8da8aaff..00000000 --- a/python/src/ouster/cli/core/pcap.py +++ /dev/null @@ -1,962 +0,0 @@ -# type: ignore -from datetime import datetime -import itertools -import os -import logging -from typing import Optional, List, Tuple - -import numpy as np - -import click -from more_itertools import side_effect, consume -from prettytable import PrettyTable, PLAIN_COLUMNS # type: ignore -from textwrap import indent - -from ouster import client -from ouster.cli.core.sensor import configure_sensor -from ouster.sdk.util import resolve_metadata -import ouster.sdk.pose_util as pu -from ouster.sdkx import packet_iter -from ouster.sdkx.parsing import default_scan_fields -from ouster.sdkx.util import resolve_extrinsics -from .util import (click_ro_file, import_rosbag_modules) - - -HAS_MULTI = False -try: - from ouster.sdk.util import resolve_metadata_multi - from ouster.sdkx.multi import PcapMulti, ScansMulti, collate_scans - from ouster.sdkx.multi_viz import MultiLidarScanViz - HAS_MULTI = True -except ImportError as e: - logging.debug(e) - - -@click.group(name="pcap", hidden=True) -def pcap_group() -> None: - """Commands for working with pcap files.""" - pass - - -def match_metadata_with_data_stream(all_infos, meta): - try: - return [k for k in all_infos.udp_streams.keys() if k.dst_port == meta.udp_port_lidar][0] - except IndexError: - return None - - -def print_stream_table(all_infos): - # format output - table = PrettyTable() - table.field_names = [ - '', 'Src IP', 'Dst IP', 'Src Port', 'Dst Port', 'AF', 'Frag', 'Size', - 'Count' - ] - - def stream_sort(k): - return (list(k)[0].dst_ip, list(k)[0].src_ip, list(k)[0].dst_port) - - for k, v in sorted(all_infos.udp_streams.items(), key=stream_sort): - frag = 'No' if (len(v.fragment_counts) == 1) and (1 in v.fragment_counts) else 'Yes' - - first = True - af_count = len(v.payload_size_counts.items()) - for af_key, af_value in v.ip_version_counts.items(): - size_count = len(v.payload_size_counts.items()) - for size_key, size_value in v.payload_size_counts.items(): - cont = "" - - if (size_count > 1 or af_count > 1): - cont = 'X' if first else '↳' - - table.add_row([ - cont, k.src_ip, k.dst_ip, k.src_port, k.dst_port, af_key, frag, size_key, size_value - ]) - first = False - table.set_style(PLAIN_COLUMNS) - table.align = 'r' - table.align['Src IP'] = 'l' # type: ignore - click.echo(click.style(indent(str(table), ' '), fg='yellow')) - - -@pcap_group.command(name='info') -@click.argument('file', required=True, type=click_ro_file) -@click.option('-n', type=int, default=-1, help="Read only INTEGER packets.") -def pcap_info(file: str, n: int) -> None: - """Print information about a pcap file to stdout.""" - try: - import ouster.pcap as pcap - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - - pcap_size = os.path.getsize(file) - # read full pcap with progress bar - all_infos = None - with click.progressbar(length=pcap_size, label="Reading pcap:") as bar: - def progress_callback(current, diff, total): - bar.update(diff) - all_infos = pcap._packet_info_stream(file, n, progress_callback, 100) - bar.update(pcap_size) - - encap = { - 0: '', - 1: 'ETHERNET', - 42: 'SLL' - }.get(all_infos.encapsulation_protocol, 'UNKNOWN') - min_datetime = datetime.fromtimestamp(all_infos.timestamp_min) - 
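    # timestamp_min/timestamp_max are the capture-time bounds reported by
    # _packet_info_stream; datetime.fromtimestamp interprets them as POSIX
    # seconds and renders them in local time for display.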
max_datetime = datetime.fromtimestamp(all_infos.timestamp_max) - duration = max_datetime - min_datetime - - click.echo(f"File size: {pcap_size/(2**20):.2f}M") - click.echo(f"Packets read: {all_infos.total_packets}") - click.echo(f"Encapsulation: {encap}") - click.echo(f"Capture start: {min_datetime}") - click.echo(f"Capture end: {max_datetime}") - click.echo(f"Duration: {duration}") - click.echo("UDP Streams:") - print_stream_table(all_infos) - - -@pcap_group.command(name="record") -@click.argument('hostname', required=True, type=str) -@click.option('-d', - 'dest', - required=False, - default=".", - type=click.Path(exists=True, file_okay=False, writable=True), - help="Directory to output files. Defaults to current dir") -@click.option('-l', '--lidar-port', default=None, type=int, help="Lidar port") -@click.option('-i', '--imu-port', default=None, type=int, help="Imu port") -@click.option('-F', '--filter', is_flag=True, help="Drop scans missing data") -@click.option('-n', '--n-frames', type=int, help="number of lidar frames") -@click.option('-s', '--n-seconds', default=0.0, help="max time to record") -@click.option('--chunk-size', default=0, help="split output by size (MB)") -@click.option('-b', '--buf-size', default=640, hidden=True, help="Max packets to buffer") -@click.option('-t', '--timeout', default=1.0, help="Seconds to wait for data") -@click.option('-p', '--prefix', default="", help="Recorded file name prefix") -@click.option('--viz', required=False, is_flag=True, help="Visualize point cloud during recording") -@click.option('--legacy/--non-legacy', - default=False, - help="Use legacy metadata format or not") -@click.option('-x', '--do-not-reinitialize', is_flag=True, default=False, - help="Do not reinitialize (by default it will reinitialize if needed)") -@click.option('-y', '--no-auto-udp-dest', is_flag=True, default=False, - help="Do not automatically set udp_dest (by default it will auto set udp_dest") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -def pcap_record(hostname: str, dest, lidar_port: int, imu_port: int, - filter: bool, n_frames: Optional[int], n_seconds: float, - chunk_size: int, buf_size: int, timeout: float, prefix: str, - viz: bool, legacy: bool, do_not_reinitialize: bool, - no_auto_udp_dest: bool, accum_num: int, - accum_every: Optional[int], accum_every_m: Optional[float], - accum_map: bool, accum_map_ratio: float) -> None: - """Record lidar and IMU packets from a sensor to a pcap file. - - Note: this will currently not configure the sensor or query the sensor for - the port to listen on. You will need to set the sensor port and destination - settings separately. 
- """ - try: - import ouster.pcap as pcap # noqa: F401 - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - - message = "Recording" - if n_frames: - message += f" {n_frames} lidar frames" - if n_seconds: - message += f" for up to {n_seconds} seconds" - else: - message += ", hit ctrl-c to exit" - - config = configure_sensor(hostname, lidar_port, do_not_reinitialize, no_auto_udp_dest) - - click.echo(f"Initializing connection to sensor {hostname} on " - f"lidar port {config.udp_port_lidar} with udp dest '{config.udp_dest}'...") - - source = client.Sensor(hostname, - config.udp_port_lidar, - config.udp_port_imu, - buf_size=buf_size, - timeout=timeout if timeout > 0 else None, - _legacy_format=legacy) - - # fancy automatic file naming - time = datetime.now().strftime("%Y%m%d_%H%M%S") - metadata = source.metadata - base_name = f"{prefix}{metadata.prod_line}_{metadata.fw_rev}_{metadata.mode}_{time}" - meta_path = os.path.join(dest, base_name) + ".json" - - scans_source = None - - try: - click.echo(f"Writing metadata to {meta_path}") - source.write_metadata(meta_path) - packets = packet_iter.RecordingPacketSource( - source, dest, - prefix=prefix, n_seconds=n_seconds, n_frames=n_frames, chunk_size=chunk_size, - lidar_port = config.udp_port_lidar, imu_port = config.udp_port_imu - ) - - click.echo(message) - if viz: - try: - from ouster.viz import SimpleViz, scans_accum_for_cli - except ImportError as e: - raise click.ClickException( - "Please verify that libGL is installed. Error: " + str(e)) - # TODO: deduplicate, handle extrinsics (maybe? not sure this would make sense...) - # enable parsing flags field - field_types = default_scan_fields( - source.metadata.format.udp_profile_lidar, flags=True) - - scans_source = client.Scans(packets, - fields=field_types, - complete=filter) - - scans_accum = scans_accum_for_cli(scans_source.metadata, - accum_num=accum_num, - accum_every=accum_every, - accum_every_m=accum_every_m, - accum_map=accum_map, - accum_map_ratio=accum_map_ratio) - SimpleViz(scans_source.metadata, _buflen=0, - scans_accum=scans_accum).run(scans_source) - - else: - consume(packets) - - except KeyboardInterrupt: - click.echo("\nInterrupted") - finally: - if scans_source is not None and scans_source._timed_out: - click.echo(f"ERROR: Timed out while awaiting new packets from sensor {hostname} " - f"using udp destination {config.udp_dest} on port {config.udp_port_lidar}. " - f"Check your firewall settings and/or ensure that the lidar port " - f"{config.udp_port_lidar} is not being held open.") - source.close() - - -@pcap_group.command(name="viz") -@click.argument('file', required=True, type=click.Path(exists=True)) -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -# TWS 20230627: '--cycle' is a deprecated option and only hidden to prevent breaking scripts that may be using it -@click.option('-c', '--cycle', is_flag=True, help="Loop playback", hidden=True) -@click.option('-e', '--on-eof', default='loop', type=click.Choice(['loop', 'stop', 'exit']), - help="Loop, stop, or exit after reaching end of file") -@click.option('-l', '--lidar-port', default=None, help="Dest. port of lidar data") -@click.option('-i', '--imu-port', default=None, help="Dest. 
port of imu data") -@click.option('-F', '--filter', is_flag=True, help="Drop scans missing data") -@click.option('-b', '--buf', default=50, help="Scans to buffer for stepping") -@click.option('-r', - '--rate', - default=1.0, - help="Playback rate. One of 0, 0.25, 0.5, 0.75, 1.0, 1.5, 2.0, 3.0") -@click.option('--extrinsics', - type=float, - required=False, - nargs=16, - help='Lidar sensor extrinsics to use in viz') -@click.option('--soft-id-check', - is_flag=True, - help="Continue parsing lidar packets even if init_id/sn doesn't " - "match with metadata") -@click.option("-p", "--pause", is_flag=True, help="Pause after the first scan") -@click.option("--pause-at", - default=-1, - help="Lidar Scan number to pause") -@click.option('--multi', - is_flag=True, - hidden=not HAS_MULTI, - help='Turn on multi sensor pcap handling and metadata resolutions') -@click.option('--timeout', - type=float, - default=10.0, - help="Timeout in seconds, after which the script will terminate " - "if no lidar data is encountered in the PCAP file") -@click.option('--kitti-poses', - required=False, - type=click_ro_file, - help="Poses file in Kitti format, one pose per scan " - "(can be generated by kiss-icp)") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -def pcap_viz(file: str, meta: Optional[str], cycle: bool, on_eof: str, - lidar_port: Optional[int], imu_port: Optional[int], filter: bool, - buf: int, rate: float, extrinsics: Optional[List[float]], - soft_id_check: bool, pause: bool, pause_at: int, multi: bool, - timeout: float, kitti_poses: Optional[str], accum_num: int, - accum_every: Optional[int], accum_every_m: Optional[float], - accum_map: bool, accum_map_ratio: float) -> None: - """Visualize data from a pcap file. - - To correctly visualize a pcap containing multiple UDP streams, you must - specify a destination port. All packets recorded with a different - destination port will be filtered out. - """ - if timeout <= 0.0: - timeout = None - - try: - import ouster.pcap as pcap - except ImportError as e: - raise click.ClickException( - "Please verify that libpcap is installed. Error: " + str(e)) - - try: - from ouster.viz import SimpleViz, LidarScanViz, scans_accum_for_cli - except ImportError as e: - raise click.ClickException( - "Please verify that libGL is installed. 
Error: " + str(e)) - - if not HAS_MULTI and multi: - raise click.ClickException("--multi is not supported in this version.") - - if pause and pause_at == -1: - pause_at = 0 - - if rate not in SimpleViz._playback_rates: - raise click.ClickException("Invalid rate specified") - - if not multi: - # Single sensor pcap handling - - # the only reason why we can't always use PcapMulti is that we still - # want to pass custom lidar_port and imu_port from command line (for - # now at least) - # TODO[pb]: Decide when we can remove the custom lidar_port/imu_port - # params from command line and switch everything to a - # single PcapMulti source (it will simplify branching in - # pcap_viz) - - meta = resolve_metadata(file, meta) - if not meta: - raise click.ClickException( - "File not found, please specify a metadata file with `-m`") - with open(meta) as json: - click.echo(f"Reading metadata from: {meta}") - info = client.SensorInfo(json.read()) - - source = pcap.Pcap(file, - info, - lidar_port=lidar_port, - imu_port=imu_port, - loop=(on_eof == 'loop'), - _soft_id_check=soft_id_check) - - # Handle extrinsics, for single sensor source - ext_found = False - if extrinsics: - source.metadata.extrinsic = np.array(extrinsics).reshape((4, 4)) - else: - # Lookup for known extrinsics - ext_results = resolve_extrinsics(data_path=file, - infos=[source.metadata]) - if ext_results and ext_results[0]: - source.metadata.extrinsic = ext_results[0][0] - ext_found = True - - if extrinsics or ext_found: - print(f"Using sensor extrinsics:\n{source.metadata.extrinsic}") - - # enable parsing flags field - field_types = default_scan_fields( - source.metadata.format.udp_profile_lidar, flags=True) - - scans_source = client.Scans(source, - fields=field_types, - complete=filter, - timeout=timeout) - - ls_viz = LidarScanViz(scans_source.metadata) - - if kitti_poses: - scans = pu.pose_scans_from_kitti(scans_source, kitti_poses) - else: - scans = iter(scans_source) - - elif HAS_MULTI and multi: - # Multi sensor pcap handling - - metadata_paths = resolve_metadata_multi(file) - if not metadata_paths: - raise click.ClickException( - "Metadata jsons not found. 
Make sure that metadata json files " - "have common prefix with a PCAP file") - - source = PcapMulti(file, - metadata_paths=metadata_paths, - _soft_id_check=soft_id_check, - _resolve_extrinsics=True) - - # print extrinsics if any were found - for ext_source, m in zip(source.extrinsics_source, - source._metadata): - if ext_source: - print(f"Found extrinsics for {m.sn} " - f"(from {ext_source}):\n{m.extrinsic}") - - # enable parsing flags field - field_types = [ - default_scan_fields(m.format.udp_profile_lidar, flags=True) - for m in source.metadata - ] - - # set sensor names as idx in the source - for idx, m in enumerate(source.metadata): - source.metadata[idx].hostname = f"sensoridx: {idx}" - - ls_viz = MultiLidarScanViz(source.metadata, source_name=file) - - scans_source = ScansMulti(source, - fields=field_types, - complete=filter) - - scans = collate_scans(scans_source, use_unsynced=True) - - scans_accum = scans_accum_for_cli(scans_source.metadata, - accum_num=accum_num, - accum_every=accum_every, - accum_every_m=accum_every_m, - accum_map=accum_map, - accum_map_ratio=accum_map_ratio) - - SimpleViz(ls_viz, rate=rate, pause_at=pause_at, on_eof=on_eof, - _buflen=buf, scans_accum=scans_accum).run(scans) - - if type(scans_source) is client.Scans and (scans_source._timed_out or scans_source._scans_produced == 0): - click.echo(click.style( - f"\nERROR: no frames matching the provided metadata '{meta}' were found in '{file}'.", - fg='yellow' - )) - all_infos = pcap._packet_info_stream(file, scans_source._packets_consumed, None, 100) - matched_stream = match_metadata_with_data_stream(all_infos, source.metadata) - if not matched_stream: - click.echo(click.style( - "No UDP stream in the data file has a destination port " - f"of {source.metadata.udp_port_lidar}, " - "which is the port specified in the metadata file.\n", fg='yellow')) - click.echo(click.style("The packets read contained the following data streams:", fg='yellow')) - # TODO: check packet sizes and print appropriate errors if there's a mismatch - print_stream_table(all_infos) - if source._errors: - click.echo(click.style("Packet errors were detected in the dataset:", fg='yellow')) - for k, v in source._errors.items(): - click.echo(click.style(f" {str(k)}, count={v}", fg='yellow')) - - if hasattr(source, 'id_error_count') and source.id_error_count and not soft_id_check: - click.echo(click.style("NOTE: To disable strict init_id/sn checking use " - "--soft-id-check option (may lead to parsing " - "errors)", fg='yellow')) - - click.echo("Done") - - -@pcap_group.command(name="slice") -@click.argument('file', type=click_ro_file) -@click.option('-s', '--start-frame', default=0, help="Start frame index") -@click.option('-n', '--num-frames', default=10, help="Number of frames") -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('-l', - '--lidar-port', - type=int, - default=None, - help="Dest. port of lidar data") -@click.option('-i', '--imu-port', type=int, default=None, help="Dest. 
port of imu data") -@click.option('-o', '--output', default=None, type=click.Path(exists=False)) -@click.option('--soft-id-check', - is_flag=True, - help="Continue parsing lidar packets even if init_id/sn doesn't " - "match with metadata") -def pcap_slice(file: str, start_frame: int, num_frames: int, - meta: Optional[str], lidar_port: Optional[int], - imu_port: Optional[int], output: Optional[str], - soft_id_check: bool) -> None: - pcap_slice_impl(file, start_frame, num_frames, meta, lidar_port, - imu_port, output, soft_id_check) - - -def pcap_slice_impl(file: str, start_frame: int, num_frames: int, - meta: Optional[str], lidar_port: Optional[int], - imu_port: Optional[int], output: Optional[str], - soft_id_check: bool) -> None: - """Truncate a pcap file to the specified frames.""" - - try: - import ouster.pcap as pcap - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - - meta = resolve_metadata(file, meta) - if not meta: - raise click.ClickException( - "File not found, please specify a metadata file with `-m`") - with open(meta) as json: - click.echo(f"Reading metadata from: {meta}") - info = client.SensorInfo(json.read()) - - source = pcap.Pcap(file, - info, - lidar_port=lidar_port, - imu_port=imu_port, - _soft_id_check=soft_id_check) - - frames = packet_iter.ichunked_framed(source) - sel_frames = itertools.islice(frames, start_frame, - start_frame + num_frames) - - default_output = f"./slice_{start_frame}-{num_frames}_{os.path.basename(file)}" - new_pcap_path = output or default_output - click.echo(f"Writing: {new_pcap_path}") - - try: - with click.progressbar(sel_frames, length=num_frames, - label="Progress:") as prog_frames: - # TODO[pb]: pass lidar_port and imu_port as they are read from Pcap - # source? - pcap.record(itertools.chain.from_iterable(prog_frames), - new_pcap_path) - finally: - if source.id_error_count: - print(f"WARNING: {source.id_error_count} lidar_packets with " - "mismatched init_id/sn were detected.") - if not soft_id_check: - print("NOTE: To disable strict init_id/sn checking use " - "--soft-id-check option (may lead to parsing " - "errors)") - - -@pcap_group.command(name="to_bag") -@click.argument('file', required=True) -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('-l', '--lidar-port', default=None, type=int, help="Dest. port of lidar data") -@click.option('-i', '--imu-port', default=None, type=int, help="Dest. port of imu data") -@click.option('-o', '--output', required=False, help="BAG output filename") -@click.option('--soft-id-check', - is_flag=True, - help="Continue parsing lidar packets even if init_id/sn doesn't " - "match with metadata") -def pcap_to_bag(file: str, meta: Optional[str], lidar_port: Optional[int], - imu_port: Optional[int], output: Optional[str], - soft_id_check: bool) -> None: - return pcap_to_bag_impl(file, meta, lidar_port, imu_port, output, soft_id_check) - - -def pcap_to_bag_impl(file: str, meta: Optional[str], lidar_port: Optional[int], - imu_port: Optional[int], output: Optional[str], - soft_id_check: bool) -> None: - """Convert pcap to bag. - - Requires the active ROS environment or ROS-less rospy/rosbag python - modules installed. See error message for details. 
- """ - - try: - import ouster.pcap as pcap - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - - # Checks only ros imports availability - import_rosbag_modules(raise_on_fail=True) - - from ouster.sdkx.bag import PacketMsg - - import rosbag - import rospy - - try: - meta = resolve_metadata(file, meta) - if not meta: - raise click.ClickException( - "File not found, please specify a metadata file with `-m`") - with open(meta) as json: - click.echo(f"Reading metadata from: {meta}") - info = client.SensorInfo(json.read()) - except click.ClickException: - # if meta can't be resolved we can work without one, so - # creating a defaut info to enable pcap.Pcap() call below - info = client.SensorInfo.from_default(client.LidarMode.MODE_1024x10) - - if not output: - output = os.path.splitext(os.path.basename(file))[0] + '.bag' - - print("Converting: ") - print(f" PCAP file: {file}") - print(f" with json file: {meta}") - print(f" to BAG file: {output}") - - source: client.PacketSource - source = pcap.Pcap(file, - info, - lidar_port=lidar_port, - imu_port=imu_port, - _soft_id_check=soft_id_check) - - # Get info from Pcap source of guessed port to show to the user. - # yes, using some private members here, but it's useful to see what was guessed - if source.ports[0]: - lidar_port = source.ports[0] - if source.ports[1]: - imu_port = source.ports[1] - - print("\nUsing sensor data:") - print(f" lidar_port = {lidar_port}, imu_port = {imu_port}") - - lidar_topic = "/os_node/lidar_packets" - imu_topic = "/os_node/imu_packets" - - ls_cnt = 0 - imu_cnt = 0 - - print("\nConverting PCAP to BAG ... ") - try: - with rosbag.Bag(output, 'w') as outbag: - for packet in source: - ts = rospy.Time.from_sec(packet.capture_timestamp) - msg = PacketMsg(buf=packet._data.tobytes()) - if isinstance(packet, client.LidarPacket): - outbag.write(lidar_topic, msg, ts) - ls_cnt += 1 - elif isinstance(packet, client.ImuPacket): - outbag.write(imu_topic, msg, ts) - imu_cnt += 1 - except KeyboardInterrupt: - print("Interrupted! Finishing up ...") - finally: - print(f"\nSaved to BAG file: {output}") - print(f" LidarPackets : {ls_cnt}\t(topic: {lidar_topic})") - print(f" ImuPackets : {imu_cnt}\t(topic: {imu_topic})") - - if source.id_error_count: - print(f"WARNING: {source.id_error_count} lidar_packets with " - "mismatched init_id/sn were detected.") - if not soft_id_check: - print("NOTE: To disable strict init_id/sn checking use " - "--soft-id-check option (may lead to parsing " - "errors)") - - -@pcap_group.command(name="from_bag") -@click.argument('file', required=True, type=click.Path(exists=True)) -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('-l', '--lidar-topic', default="", help="Topic with lidar data") -@click.option('-i', '--imu-topic', default="", help="Topic with imu data") -@click.option('-o', '--output', required=False, help="BAG output filename") -def bag_to_pcap(file: str, meta: Optional[str], lidar_topic: str, - imu_topic: str, output: Optional[str]) -> None: - """Convert bag to pcap. - - Requires the active ROS environment or ROS-less rospy/rosbag python - modules installed. See error message for details. 
- """ - - try: - import ouster.pcap as pcap - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - - # Checks only ros imports availability - import_rosbag_modules(raise_on_fail=True) - - from ouster.sdkx.bag import BagSource - - meta = resolve_metadata(file, meta) - if not meta: - raise click.ClickException( - "File not found, please specify a metadata file with `-m`") - with open(meta) as json: - click.echo(f"Reading metadata from: {meta}") - info = client.SensorInfo(json.read()) - - if not output: - output = os.path.splitext(os.path.basename(file))[0] + '.pcap' - - print("Converting: ") - print(f" BAG file: {file}") - print(f" with json file: {meta}") - print(f" to PCAP file: {output}") - - source = BagSource(file, - info, - lidar_topic=lidar_topic, - imu_topic=imu_topic) - - print("\nUsing sensor data:") - print(f" topics = {source.topics}") - - lidar_port = info.udp_port_lidar if info.udp_port_lidar else 7502 - imu_port = info.udp_port_imu if info.udp_port_imu else 7503 - - print("\nUsing output ports:") - print(f" lidar_port = {lidar_port}") - print(f" imu_port = {imu_port}") - - lp_cnt = 0 - imup_cnt = 0 - - def count_packets(packet: client.Packet): - nonlocal lp_cnt, imup_cnt - if isinstance(packet, client.LidarPacket): - lp_cnt += 1 - elif isinstance(packet, client.ImuPacket): - imup_cnt += 1 - - print("\nConverting BAG to PCAP ... ") - packets_written = 0 - keyboard_int = False - try: - # TODO: For a better user experience we may want to add a progress bar - # output for this and other converters commands. - packets_written = pcap.record(side_effect(count_packets, source), - output, - lidar_port=lidar_port, - imu_port=imu_port) - - except KeyboardInterrupt: - print("Interrupted! Finishing up ...") - keyboard_int = True - finally: - print(f"\nSaved to PCAP file: {output}") - if not keyboard_int: - print(f" total packets written: {packets_written}") - print(f" LidarPackets: {lp_cnt}") - print(f" ImuPackets: {imup_cnt}") - source.close() - - -@pcap_group.command(name="to_csv") -@click.argument('file', required=True, type=click.Path(exists=True)) -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('--csv-dir', default=".", help="path to the directory to save csv files") -@click.option('--csv-base', default="csv_out", help="base filename string for pcap output") -@click.option('--csv-ext', default='csv', help="file extension to use, 'csv' by default.") -@click.option('--start-index', default=0, help="index of scan to start outputting") -@click.option('--num-scans', default=1, help="number of scans to save from pcap to csv files") -def pcap_to_csv(file: str, - meta: Optional[str], - num_scans: Optional[int], - start_index: Optional[int], - csv_dir: Optional[str], - csv_base: Optional[str], - csv_ext: Optional[str]) -> None: - """Write scans from a pcap to csv files (one per lidar scan). - - The number of saved lines per csv file is always H x W, which corresponds to - a full 2D image representation of a lidar scan. - - Each line in a csv file is (for DUAL profile): - - TIMESTAMP (ns), RANGE (mm), RANGE2 (mm), SIGNAL (photons), - SIGNAL2 (photons), REFLECTIVITY (%), REFLECTIVITY2 (%), - NEAR_IR (photons), X (m), Y (m), Z (m), X2 (m), Y2 (m), Z2(m), - MEASUREMENT_ID, ROW, COLUMN - - If ``csv_ext`` ends in ``.gz``, the file is automatically saved in - compressed gzip format. 
:func:`.numpy.loadtxt` can be used to read gzipped - files transparently back to :class:`.numpy.ndarray`. - """ - - # ensure that base csv_dir exists - if not os.path.exists(csv_dir): - os.makedirs(csv_dir) - - output_paths = [os.path.join(csv_dir, f'{csv_base}_{idx:06d}.{csv_ext}') for - idx in range(start_index, start_index + num_scans)] - - pcap_to_csv_impl(file, meta, start_index, num_scans, output_paths) - - -def pcap_to_csv_impl(file: str, - meta: Optional[str], - start_index: Optional[int], - num_scans: Optional[int], - output_names: Optional[List[str]]) -> None: - - try: - import ouster.pcap as pcap - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - - metadata_path = resolve_metadata(file, meta) - if not metadata_path: - raise click.ClickException( - "File not found, please specify a metadata file with `-m`") - with open(metadata_path) as json: - click.echo(f"Reading metadata from: {metadata_path}") - metadata = client.SensorInfo(json.read()) - - source = pcap.Pcap(file, metadata) - - dual = False - if metadata.format.udp_profile_lidar == client.UDPProfileLidar.PROFILE_LIDAR_RNG19_RFL8_SIG16_NIR16_DUAL: - dual = True - print("Note: You've selected to convert a dual returns pcap to CSV. Each row " - "will represent a single pixel, so that both returns for that pixel will " - "be on a single row. As this is an example we provide for getting " - "started, we realize that you may have conversion needs which are not met " - "by this function. You can find the source code on the Python SDK " - "documentation website to modify it for your own needs.") - - # construct csv header and data format - def get_fields_info(scan: client.LidarScan) -> Tuple[str, List[str]]: - field_names = 'TIMESTAMP (ns), ROW, DESTAGGERED IMAGE COLUMN, MEASUREMENT_ID' - field_fmts = ['%d'] * 4 - for chan_field in scan.fields: - field_names += f', {chan_field}' - if chan_field in [client.ChanField.RANGE, client.ChanField.RANGE2]: - field_names += ' (mm)' - if chan_field in [client.ChanField.REFLECTIVITY, client.ChanField.REFLECTIVITY2]: - field_names += ' (%)' - if chan_field in [client.ChanField.SIGNAL, client.ChanField.SIGNAL2, - client.ChanField.NEAR_IR]: - field_names += ' (photons)' - field_fmts.append('%d') - field_names += ', X1 (m), Y1 (m), Z1 (m)' - field_fmts.extend(3 * ['%.4f']) - if dual: - field_names += ', X2 (m), Y2 (m), Z2 (m)' - field_fmts.extend(3 * ['%.4f']) - return field_names, field_fmts - - field_names: str = '' - field_fmts: List[str] = [] - - # [doc-stag-pcap-to-csv] - from itertools import islice - # precompute xyzlut to save computation in a loop - xyzlut = client.XYZLut(metadata) - - # create an iterator of LidarScans from pcap and bound it if num is specified - scans = iter(client.Scans(source)) - # if num_scans is None - scans = islice(scans, start_index, start_index + num_scans) - - row_layer = np.fromfunction(lambda i, j: i, - (metadata.format.pixels_per_column, - metadata.format.columns_per_frame), dtype=int) - column_layer = np.fromfunction(lambda i, j: j, - (metadata.format.pixels_per_column, - metadata.format.columns_per_frame), dtype=int) - column_layer_staggered = client.destagger(metadata, column_layer, - inverse=True) - - idx = None - for idx, scan in enumerate(scans): - - # initialize the field names for csv header - if not field_names or not field_fmts: - field_names, field_fmts = get_fields_info(scan) - - # copy per-column timestamps and measurement_ids for each beam - timestamps = np.tile(scan.timestamp, (scan.h, 1)) - 
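        # scan.timestamp holds one value per column (shape (w,)); tiling it
        # (scan.h, 1) times expands it to the full (h, w) image so every CSV
        # row carries its column's timestamp. The same is done for
        # measurement_id just below.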
measurement_ids = np.tile(scan.measurement_id, (scan.h, 1)) - - # grab channel data - fields_values = [scan.field(ch) for ch in scan.fields] - - frame = np.dstack((timestamps, row_layer, column_layer_staggered, - measurement_ids, *fields_values)) - - # output points in "image" vs. staggered order - frame = client.destagger(metadata, frame) - - # destagger XYZ separately since it has a different type - xyz = xyzlut(scan.field(client.ChanField.RANGE)) - xyz_destaggered = client.destagger(metadata, xyz) - - if dual: - xyz2 = xyzlut(scan.field(client.ChanField.RANGE2)) - xyz2_destaggered = client.destagger(metadata, xyz2) - - # get all data as one H x W x num fields int64 array for savetxt() - frame = np.dstack(tuple(map(lambda x: x.astype(object), - (frame, xyz_destaggered, xyz2_destaggered)))) - - else: - # get all data as one H x W x num fields int64 array for savetxt() - frame = np.dstack(tuple(map(lambda x: x.astype(object), - (frame, xyz_destaggered)))) - - frame_colmajor = np.swapaxes(frame, 0, 1) - - # write csv out to file - csv_path = output_names[idx] - print(f'write frame index #{idx + start_index}, to file: {csv_path}') - - header = '\n'.join([f'frame num: {idx}', field_names]) - - np.savetxt(csv_path, - frame_colmajor.reshape(-1, frame.shape[2]), - fmt=field_fmts, - delimiter=',', - header=header) - # [doc-etag-pcap-to-csv] - if idx is None: - print("No CSVs outputted. Check your start index to ensure that it " - "doesn't start past the total number of frames in your PCAP") - - -@pcap_group.command(hidden=True) -@click.argument('file', required=True, type=click.Path(exists=True)) -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('-c', '--cycle', is_flag=True, required=False, type=bool, help='Loop playback') -@click.option('-h', '--host', required=False, type=str, default='127.0.1.1', help='Dest. host of UDP packets') -@click.option('--lidar-port', required=False, type=int, default=7502, help='Dest. port of lidar data') -@click.option('--imu-port', required=False, type=int, default=7503, help='Dest. 
port of imu data') -def replay(file, meta, cycle, host, lidar_port, imu_port): - """Replay lidar and IMU packets from a PCAP file to a UDP socket.""" - try: - import ouster.pcap as pcap - except ImportError: - raise click.ClickException("Please verify that libpcap is installed") - meta = resolve_metadata(file, meta) - with open(meta) as json: - click.echo(f"Reading metadata from: {meta}") - info = client.SensorInfo(json.read()) - - click.echo(f"Sending UDP packets to host {host}, ports {lidar_port} and {imu_port} - Ctrl-C to exit.") - - def replay_once(): - replay = pcap._replay(file, info, host, lidar_port, imu_port) - consume(replay) - - replay_once() - while cycle: - replay_once() diff --git a/python/src/ouster/cli/core/sensor.py b/python/src/ouster/cli/core/sensor.py deleted file mode 100644 index 07ba5734..00000000 --- a/python/src/ouster/cli/core/sensor.py +++ /dev/null @@ -1,337 +0,0 @@ -# type: ignore -import json -from typing import Optional, List - -import numpy as np -import requests - -import click -from click import ClickException - -import ouster.client as client -from ouster.client._client import SensorConfig -from ouster.sdk.util import firmware_version -from .util import click_ro_file -from copy import copy -from packaging import version - - -MIN_AUTO_DEST_FW = version.Version("2.3.1") - - -@click.group(name="sensor", hidden=True) -def sensor_group() -> None: - """Commands for working with sensors.""" - pass - - -@sensor_group.command() -@click.argument('hostname', required=True) -@click.option('--legacy/--non-legacy', - default=False, - help="Use legacy metadata format or not") -def metadata(hostname: str, legacy: bool) -> None: - """Dump sensor metadata to stdout.""" - try: - click.echo(client.Sensor(hostname, 7502, 7503, - _legacy_format=legacy)._fetched_meta) - except RuntimeError as e: - raise ClickException(str(e)) - - -@sensor_group.command() -@click.argument('hostname', type=str, required=True) -@click.argument('keyval', metavar='[KEY VAL]...', type=str, nargs=-1) -@click.option('-d', 'dump', is_flag=True, help='Dump current configuration') -@click.option('-c', 'file', type=click.File(), help='Read config from file') -@click.option('-u', 'auto', is_flag=True, help='Set automatic udp dest') -@click.option('-p', 'persist', is_flag=True, help='Persist configuration') -@click.option('-s/-n', 'standby', default=None, help='Set STANDBY or NORMAL') -def config(hostname, keyval, dump, file, auto, persist, standby) -> None: - """Manipulate the sensor configuration. - - Update the sensor configuration or dump it to stdout. The first positional - argument is the sensor hostname; remaining arguments are interpreted as - config parameter key/value pairs, for example: - - \b - $ ouster-cli sensor config os-99xxxxxxxxxx \\ - lidar_mode 2048x10 azimuth_window "[20000, 60000]" - - If no options or config param values are specified, use the default UDP - ports, automatic UDP destination, full azimuth azimuth window, and set the - operating mode to NORMAL. 
- """ - - def parse(s): - """Helper to read cli arg as json value with fallback to string.""" - try: - return json.loads(s) - except json.decoder.JSONDecodeError: - return json.loads(f'"{s}"') - - if dump: - if file or keyval or auto or persist or standby is not None: - raise ClickException("Cannot use other options with `-d` command") - cfg = client.get_config(hostname) - click.echo(cfg) - return - elif file: - if keyval: - raise ClickException("Cannot specify extra config keys with `-c`") - cfg = client.SensorConfig(file.read()) - click.echo("Setting config from file:") - elif not keyval and not auto and standby is None: - auto = True - cfg = client.SensorConfig() - cfg.udp_port_lidar = 7502 - cfg.udp_port_imu = 7503 - cfg.azimuth_window = (0, 360000) - cfg.signal_multiplier = 1 - cfg.operating_mode = client.OperatingMode.OPERATING_NORMAL - click.echo("No config specified; using defaults and auto UDP dest:") - else: - if len(keyval) % 2 != 0: - raise ClickException(f"Unmatched key/value arg: {keyval[-1]}") - d = dict(zip(keyval[::2], map(parse, keyval[1::2]))) - cfg = client.SensorConfig(json.dumps(d)) - click.echo("Updating configuration:") - - if standby is not None: - cfg.operating_mode = (client.OperatingMode.OPERATING_STANDBY if standby - else client.OperatingMode.OPERATING_NORMAL) - - click.echo(f"{cfg}") - try: - client.set_config(hostname, cfg, udp_dest_auto=auto, persist=persist) - except RuntimeError as e: - raise ClickException(str(e)) - - -def auto_detected_udp_dest(hostname: str) -> int: - """ - Function which obtains the udp_dest the sensor would choose when automatically detecting - without changing anything else about sensor state - - Args: - hostname: sensor hostname - Returns: - udp_dest: the udp_dest the sensor detects automatically - """ - orig_config = client.get_config(hostname, active=True) - - # get what the possible auto udp_dest is - config_endpoint = f"http://{hostname}/api/v1/sensor/config" - response = requests.post(config_endpoint, params={'reinit': False, 'persist': False}, - json={'udp_dest': '@auto'}) - response.raise_for_status() - - # get staged config - udp_auto_config = client.get_config(hostname, active=False) - - # set staged config back to original - response = requests.post(config_endpoint, params={'reinit': False, 'persist': False}, - json={'udp_dest': str(orig_config.udp_dest)}) - response.raise_for_status() - - return udp_auto_config.udp_dest - - -def configure_sensor(hostname: str, lidar_port: int, - do_not_reinitialize: bool, no_auto_udp_dest) -> SensorConfig: - """Depending on the args do_not_reinitialize, no_auto_udp_dest, - possibly reconfigure the sensor. Then, return the configuration that is used.""" - - click.echo(f"Contacting sensor {hostname}...") - - fw_version = firmware_version(hostname) - - auto_config_udp_dest = None - use_set_config_auto = False - - # original config - orig_config = client.get_config(hostname, active=True) - - if fw_version >= MIN_AUTO_DEST_FW: - auto_config_udp_dest = auto_detected_udp_dest(hostname) - if orig_config.udp_dest != auto_config_udp_dest: - if no_auto_udp_dest or do_not_reinitialize: - click.echo(f"WARNING: Your sensor's udp destination {orig_config.udp_dest} does " - f"not match the detected udp destination {auto_config_udp_dest}. 
" - f"If you get a Timeout error, drop -x and -y from your " - f"arguments to allow automatic udp_dest setting.") - else: - if no_auto_udp_dest or do_not_reinitialize: - click.echo("WARNING: You have opted not to allow us to reset your auto UDP dest " - "by using either -x or -y. If you get a Timeout error, drop -x and -y " - "from your arguments to allow automatic udp_dest setting.") - else: - use_set_config_auto = True - - if do_not_reinitialize: - - if orig_config.operating_mode == client.OperatingMode.OPERATING_STANDBY: - raise click.ClickException("Your sensor is in STANDBY mode but you have disallowed " - "reinitialization. Drop -x to allow reinitialization or " - "change your sensor's operating mode.") - - if lidar_port is not None and orig_config.udp_port_lidar != lidar_port: - raise click.ClickException(f"Sensor's lidar port {orig_config.udp_port_lidar} does " - f"not match provided lidar port but you have disallowed " - f"reinitialization. Drop -x to allow reinitialization or " - f"change your specified lidar_port {lidar_port}") - return orig_config - - new_config = copy(orig_config) - if lidar_port is not None and orig_config.udp_port_lidar != lidar_port: - new_config.udp_port_lidar = lidar_port - click.echo((f"Will change lidar port from {orig_config.udp_port_lidar} to " - f"{new_config.udp_port_lidar}...")) - else: - # lidar port from arguments is None - lidar_port = orig_config.udp_port_lidar - - if not no_auto_udp_dest and auto_config_udp_dest and orig_config.udp_dest != auto_config_udp_dest: - click.echo((f"Will change udp_dest from '{orig_config.udp_dest}' to automatically " - f"detected '{auto_config_udp_dest}'...")) - new_config.udp_dest = auto_config_udp_dest - - if use_set_config_auto: - click.echo(f"Will change udp_dest from '{orig_config.udp_dest}' to automatically " - "detected UDP DEST") - new_config.udp_dest = None - - new_config.operating_mode = client.OperatingMode.OPERATING_NORMAL - if new_config.operating_mode != orig_config.operating_mode: - click.echo((f"Will change sensor's operating mode from {orig_config.operating_mode}" - f" to {new_config.operating_mode}")) - - if orig_config != new_config or use_set_config_auto: - click.echo("Setting sensor config...") - client.set_config(hostname, new_config, persist=False, udp_dest_auto = use_set_config_auto) - - new_config = client.get_config(hostname) - - return new_config - - -@sensor_group.command() -@click.argument('hostname', required=True) -@click.option('-b', '--buf-size', default=256, hidden=True, help="Max packets to buffer") -@click.option('-e', '--extrinsics', type=float, nargs=16, - help='Lidar sensor extrinsics to use in viz') -@click.option('-m', '--meta', type=click_ro_file, - help="Provide separate metadata to use with sensor", hidden=True) -@click.option('-F', '--filter', is_flag=True, help="Drop scans missing data") -@click.option('-l', '--lidar-port', type=int, default=None, help="Lidar port") -@click.option('-s', '--soft-id-check', is_flag=True, hidden=True, - help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa -@click.option('-t', '--timeout', default=1.0, help="Seconds to wait for data") -@click.option('-v', '--verbose', is_flag=True, help="Print some debug output") -@click.option('-x', '--do-not-reinitialize', is_flag=True, default=False, - help="Do not reinitialize (by default it will reinitialize if needed)") -@click.option('-y', '--no-auto-udp-dest', is_flag=True, default=False, - help="Do not automatically set udp_dest (by default it will auto set 
udp_dest") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -def viz(hostname: str, lidar_port: int, meta: Optional[str], filter: bool, - buf_size: int, verbose: bool, timeout: float, - extrinsics: Optional[List[float]], soft_id_check: bool, - do_not_reinitialize: bool, no_auto_udp_dest: bool, accum_num: int, - accum_every: Optional[int], accum_every_m: Optional[float], - accum_map: bool, accum_map_ratio: float) -> None: - """Listen for data on the specified ports and run the visualizer. - - Note: Please pay attention to your firewall and networking configuration. You - may have to disable your firewall for packets to reach the visualizer/client. - """ - try: - from ouster.viz import SimpleViz, scans_accum_for_cli - from ouster.sdkx.parsing import default_scan_fields - except ImportError as e: - raise click.ClickException(str(e)) - - config = configure_sensor(hostname, lidar_port, do_not_reinitialize, no_auto_udp_dest) - - click.echo(f"Initializing connection to sensor {hostname} on " - f"lidar port {config.udp_port_lidar} with udp dest '{config.udp_dest}'...") - - # make 0 timeout in the cli mean no timeout - timeout_ = timeout if timeout > 0 else None - - # override metadata, if provided - meta_override: Optional[client.SensorInfo] = None - if meta is not None: - # warn that they should set _soft_id_check if overriding metadata - if not soft_id_check: - soft_id_check = True - click.echo(f"Setting soft_id_check as you have elected to override sensor's metadata " - f"with metadata {meta}") - - click.echo(f"Will use {meta} to override sensor metadata...") - with open(meta) as json: - meta_override = client.SensorInfo(json.read()) - - source = client.Sensor(hostname, - config.udp_port_lidar, - 7503, # doesn't matter as viz doesn't handle IMU packets - metadata=meta_override, - buf_size=buf_size, - timeout=timeout_, - _soft_id_check=soft_id_check) - - # enable parsing flags field - fields = default_scan_fields(source.metadata.format.udp_profile_lidar, - flags=True) - - try: - scans = client.Scans(source, - timeout=timeout_, - complete=filter, - fields=fields, - _max_latency=2) - - if extrinsics: - scans.metadata.extrinsic = np.array(extrinsics).reshape((4, 4)) - click.echo(f"Using sensor extrinsics:\n{scans.metadata.extrinsic}") - - scans_accum = scans_accum_for_cli(scans.metadata, - accum_num=accum_num, - accum_every=accum_every, - accum_every_m=accum_every_m, - accum_map=accum_map, - accum_map_ratio=accum_map_ratio) - - SimpleViz(scans.metadata, scans_accum=scans_accum).run(scans) - - finally: - if scans._timed_out: - click.echo(f"ERROR: Timed out while awaiting new packets from sensor {hostname} " - f"using udp destination {config.udp_dest} on port {config.udp_port_lidar}. 
" - f"Check your firewall settings and/or ensure that the lidar port " - f"{config.udp_port_lidar} is not being held open.") - - if source.id_error_count: - click.echo(f"WARNING: {source.id_error_count} lidar_packets with " - "mismatched init_id/sn were detected.") - if not soft_id_check: - click.echo("NOTE: To disable strict init_id/sn checking use " - "--soft-id-check option (may lead to parsing " - "errors)") - - click.echo("Done") diff --git a/python/src/ouster/cli/core/util.py b/python/src/ouster/cli/core/util.py index b9e3845c..ff1bf9b9 100644 --- a/python/src/ouster/cli/core/util.py +++ b/python/src/ouster/cli/core/util.py @@ -16,20 +16,26 @@ from more_itertools import consume import glob from itertools import product +from enum import Enum import json import os import re import time -from typing import Optional, Tuple +from typing import Optional, Tuple, List import tempfile import zipfile import numpy as np from copy import deepcopy -from ouster import client +from ouster.sdk import client from ouster.sdk.util import resolve_metadata +from ouster.sdk.sensor import SensorMultiPacketReader +from ouster.sdk.client import ScansMulti, PacketMultiSource, PacketMultiWrapper + +from ouster.sdk.sensor.util import configure_sensor_multi + DEFAULT_SAMPLE_URL = 'https://data.ouster.io/sdk-samples/OS2/OS2_128_bridge_sample.zip' click_ro_file = click.Path(exists=True, dir_okay=False, readable=True) @@ -148,7 +154,7 @@ def md5file(path: str) -> str: Please verify that ROS Python modules are available. The best option is to try to install unofficial rospy packages that work -with python3.7,3.8 on Ubuntu 18.04/20.04 and Debian 10 without ROS: +with Python 3.8 on Ubuntu 18.04/20.04 and Debian 10 without ROS: pip install --extra-index-url https://rospypi.github.io/simple/ rospy rosbag tf2_ros @@ -204,7 +210,7 @@ def benchmark(file: str, meta: Optional[str], url: Optional[str]) -> None: """ try: - import ouster.pcap as pcap + import ouster.sdk.pcap as pcap except ImportError: raise click.ClickException("Please verify that libpcap is installed") @@ -233,7 +239,7 @@ def benchmark(file: str, meta: Optional[str], url: Optional[str]) -> None: click.echo("Gathering system info...") sys_info = get_system_info() click.echo( - f" cpu: {sys_info.get('cpuinfo',{}).get('brand_raw', 'UNKNOWN')}") + f" cpu: {sys_info.get('cpuinfo', {}).get('brand_raw', 'UNKNOWN')}") click.echo(f" platform: {sys_info['platform']['platform']}") click.echo(f" python: {sys_info['platform']['python_version']}") click.echo(f" ouster-sdk: {sys_info['packages']['ouster-sdk']}") @@ -338,14 +344,15 @@ def convert_metadata_to_legacy(meta: str, output_path: Optional[str]) -> None: if output_path is None: click.echo(output) else: - click.echo(f"Reading metadata from: {meta} and outputting converted legacy metadata to: {output_path}") + click.echo(f"Reading metadata from: {meta} and outputting converted " + f"legacy metadata to: {output_path}") with open(output_path, "w") as outfile: outfile.write(output) @util_group.command(name="benchmark-sensor") @click.argument('hostname', required=True, type=str) -@click.option('-l', '--lidar-port', default=None, help="Lidar port") +@click.option('-l', '--lidar-port', type=int, default=None, help="Lidar port") @click.option('-n', '--n-frames', type=int, @@ -356,6 +363,22 @@ def convert_metadata_to_legacy(meta: str, output_path: Optional[str]) -> None: help="Max time process, default 20.s") @click.option('-b', '--buf-size', default=1280, help="Max packets to buffer") @click.option('-t', '--timeout', 
default=2.0, help="Seconds to wait for data") +@click.option('-x', + '--do-not-reinitialize', + is_flag=True, + default=False, + help="Do not reinitialize (by default it will reinitialize if " + " needed)") +@click.option('-y', + '--no-auto-udp-dest', + is_flag=True, + default=False, + help="Do not automatically set udp_dest (by default it will auto " + "set udp_dest") +@click.option('--multi', + is_flag=True, + hidden=False, + help='Turn on multi sensor handling') @click.option('--short', required=False, is_flag=True, @@ -386,95 +409,163 @@ def convert_metadata_to_legacy(meta: str, output_path: Optional[str]) -> None: required=False, is_flag=True, help="Don't show scan statuses") -def benchmark_sensor(hostname: str, lidar_port: int, n_frames: Optional[int], - n_seconds: float, buf_size: int, timeout: float, - short: bool, only_range_refl: bool, copy_data: bool, - scan_batch: bool, xyz: bool, xyz_mean: bool, - no_viz: bool) -> None: +def benchmark_sensor(hostname: str, lidar_port: Optional[int], + n_frames: Optional[int], n_seconds: float, buf_size: int, + do_not_reinitialize: bool, no_auto_udp_dest: bool, + multi: bool, timeout: float, short: bool, + only_range_refl: bool, copy_data: bool, scan_batch: bool, + xyz: bool, xyz_mean: bool, no_viz: bool) -> None: """Reads from the sensor and measure packet drops, cpu load etc.""" - from ouster.cli.core.sensor import configure_sensor import psutil as psu - config = configure_sensor(hostname, lidar_port, False, False) + hostnames: List[str] = [x.strip() for x in hostname.split(",") if x.strip()] + + if not multi and len(hostnames) > 1: + click.echo( + f"ERROR: Got {len(hostnames)} sensors but NOT --multi param is " + "specified. Use --multi or a single sensor source.") + return - click.echo(f"Initializing connection to sensor {hostname} on " - f"lidar port {config.udp_port_lidar} with udp dest " - f"'{config.udp_dest}'...") + click.echo(f"Checking sensor configurations for: {hostnames} ...") - source = client.Sensor(hostname, - config.udp_port_lidar, - config.udp_port_imu, - buf_size=buf_size, - timeout=timeout if timeout > 0 else None) + configs = configure_sensor_multi(hostnames, + first_lidar_port=lidar_port, + do_not_reinitialize=do_not_reinitialize, + no_auto_udp_dest=no_auto_udp_dest) - info = source.metadata + ports = [(c.udp_port_lidar, c.udp_port_imu) for c in configs] - packets_per_frame = (info.format.columns_per_frame / - info.format.columns_per_packet) - columns_per_packet = info.format.columns_per_packet + click.echo(f"Starting sensor source for: {hostnames} ...") - xyzlut = client.XYZLut(info) + packet_source: PacketMultiSource + + if not multi: + source = client.Sensor(hostnames[0], + configs[0].udp_port_lidar, + configs[0].udp_port_imu, + buf_size=buf_size, + timeout=timeout if timeout > 0 else None) + else: + source = SensorMultiPacketReader(hostnames, + ports=ports, + buf_size_secs=3.0, + timeout=timeout if timeout > 0 else None, + extrinsics_path=os.getcwd()) + + packet_source = PacketMultiWrapper(source) + + for idx, (conf, meta) in enumerate(zip(configs, packet_source.metadata)): + click.echo(f"sensor [{idx}] = ") + click.echo(f" {'Model':<20}: {meta.prod_line} {meta.fw_rev} {meta.mode}") + click.echo(f" {'SN':<20}: {meta.sn}") + click.echo(f" {'hostname':<20}: {meta.hostname}") + for prop in [ + "udp_dest", "udp_port_lidar", "udp_port_imu", "lidar_mode", + "azimuth_window", "udp_profile_lidar" + ]: + click.echo(f" {prop:<20}: {getattr(conf, prop)}") + + # TODO[pb]: Left here commented for quick test of MultiViz while we 
don't have + # `ouster-cli sensor viz --multi` implemented + # from ouster.sdk.viz import SimpleViz + # from ouster.sdk.viz.multi_viz import MultiLidarScanViz + # scan_source = ScansMulti(packet_source) + # ls_viz = MultiLidarScanViz(scan_source.metadata, source_name=str(hostnames)) + # scans = iter(scan_source) + # SimpleViz(ls_viz, _buflen=100).run(scans) + # scan_source.close() + # return + + packets_per_frame = [ + (m.format.columns_per_frame / m.format.columns_per_packet) + for m in packet_source.metadata + ] + max_packets_per_frame = max(packets_per_frame) + columns_per_packet = [ + m.format.columns_per_packet for m in packet_source.metadata + ] + + xyzlut = [client.XYZLut(info) for info in packet_source.metadata] fields = None scan_batch_flag = "S" if only_range_refl: # only minimal fields to use in LidarScan and parse in ScanBatch - fields = { + fields = [{ client.ChanField.RANGE: np.uint32, - client.ChanField.REFLECTIVITY: np.uint8 - } + client.ChanField.REFLECTIVITY: np.uint16 + } for _ in packet_source.metadata] scan_batch_flag = "SRR" flags = "N" if not copy_data else "C" + if scan_batch or xyz or xyz_mean: - data_source = client.Scans(source, fields=fields) + data_source = ScansMulti(packet_source, fields=fields) + is_scan_source = True flags = f"{scan_batch_flag}" if not copy_data else f"C {scan_batch_flag}" if xyz_mean: flags += " XYZ M" elif xyz: flags += " XYZ" else: - data_source = source - - # TODO[pb]: This frame_boundry() may need to be extracted because we already - # use it in multiple places. - last_f_id = -1 - - def frame_boundary(p: client.Packet) -> bool: - nonlocal last_f_id - if isinstance(p, client.LidarPacket): - f_id = p.frame_id - changed = last_f_id != -1 and f_id != last_f_id - last_f_id = f_id - return changed - return False + data_source = packet_source + is_scan_source = False - last_scan_ts = time.monotonic() - start_ts = last_scan_ts + frame_bound = [client.FrameBorder() for _ in data_source.metadata] - status_line = "" - status_line_init = "" + now = time.monotonic() + last_scan_ts = [now for _ in data_source.metadata] + start_ts = last_scan_ts.copy() - frames_cnt = 0 - total_packets = 0 + class PacketStatus(Enum): + """State of the packet""" + NONE = 0 + RECEIVED = 1 + OUT_OF_ORDER = 2 + + def __str__(self) -> str: + m = dict({ + PacketStatus.NONE: " ", + PacketStatus.RECEIVED: "=", + PacketStatus.OUT_OF_ORDER: "O" + }) + return m[self] + + def status_str(sl: List[PacketStatus]) -> str: + return "".join([str(ps) for ps in sl]) + + status_line = [[] for _ in data_source.metadata] + status_line_init = [[] for _ in data_source.metadata] + + frames_cnt = [0] * len(data_source.metadata) + total_packets = [0] * len(data_source.metadata) total_packets_in_buf = 0 - missed_packets = 0 + missed_packets = [0] * len(data_source.metadata) + ooo_packets = [0] * len(data_source.metadata) # out-of-order packets total_avg_cpu = 0 total_max_cpu = 0 - click.echo( - f"Receiving data: {info.prod_line}, {info.mode}, " - f"{info.format.udp_profile_lidar}, {info.format.column_window}, " - f"[{flags}] ...") + cpu_percent = 0 + max_cpu_percent = 0 + + for idx, info in enumerate(data_source.metadata): + click.echo( + f"Receiving data [{idx}]: {info.prod_line}, {info.mode}, " + f"{info.format.udp_profile_lidar}, {info.format.column_window}, " + f"[{flags}] ...") # first point from which to measure CPU usage psu.cpu_percent() try: + # this is a bit hacky, but benchmark logic works well with Tuple[int, LidarScan] iterator + # TODO: rework with proper iterator instead + it = 
data_source._scans_iter() if is_scan_source else iter(data_source) - for obj in data_source: + while True: + idx, obj = next(it) # imu_packets are not accounted if not (isinstance(obj, client.LidarPacket) @@ -484,101 +575,195 @@ def frame_boundary(p: client.Packet) -> bool: if isinstance(obj, client.LidarPacket): # no scan batching branch packet = obj if not copy_data else deepcopy(obj) - packet_num = int(packet.measurement_id[0] / columns_per_packet) - total_packets += 1 - if not frame_boundary(packet): - missed_packets += int(packet_num - len(status_line)) - status_line += " " * int(packet_num - len(status_line)) + "=" + packet_num = int(packet.measurement_id[0] / columns_per_packet[idx]) + + total_packets[idx] += 1 + + if not frame_bound[idx](packet): + pd = int(packet_num - len(status_line[idx])) + if pd >= 0: + missed_packets[idx] += pd + status_line[idx] += ([PacketStatus.NONE] * pd + + [PacketStatus.RECEIVED]) + else: + # recover one missed packet back? + ooo_packets[idx] += 1 + missed_packets[idx] -= 1 + status_line[idx][packet_num] = PacketStatus.OUT_OF_ORDER + continue else: - missed_packets += int(packets_per_frame - len(status_line)) - status_line += " " * int(packets_per_frame - len(status_line)) - status_line_init = " " * packet_num + "=" + pd = int(packets_per_frame[idx] - len(status_line[idx])) + if pd >= 0: + missed_packets[idx] += pd + status_line[idx] += [PacketStatus.NONE] * pd + status_line_init[idx] = ( + [PacketStatus.NONE] * packet_num + + [PacketStatus.RECEIVED]) + else: + click.echo( + f"WARNING: On sensor id [{idx}] possible mismatched metadata with the " + f"lidar packets stream receiving.\n" + f"Expected maximum packet measurement id of " + f"{int(packets_per_frame[idx])} but got at least " + f"{len(status_line[idx])}") elif isinstance(obj, client.LidarScan): # scan batching + xyz + mean branch scan = obj if not copy_data else deepcopy(obj) status = scan.status & 0x1 - status_split = np.array(np.split(status, packets_per_frame)) + status_split = np.array(np.split(status, packets_per_frame[idx])) # any valid column from a packet means that we received the packet status_per_packet = np.any(status_split, axis=1) - status_line = "".join( - ["=" if s else " " for s in status_per_packet]) + + status_line[idx] = [ + PacketStatus.RECEIVED if s else PacketStatus.NONE + for s in status_per_packet + ] valid_packets = np.sum(status_per_packet) - total_packets += valid_packets - missed_packets += int(packets_per_frame - valid_packets) + total_packets[idx] += valid_packets + missed_packets[idx] += int(packets_per_frame[idx] - valid_packets) if xyz or xyz_mean: - xyz_points = xyzlut(scan) + xyz_points = xyzlut[idx](scan) if xyz_mean: np.mean(xyz_points.reshape((-1, 3)), axis=0) now = time.monotonic() - scan_t = now - last_scan_ts + scan_t = now - last_scan_ts[idx] - cpu_percents = psu.cpu_percent(percpu=True) - max_cpu_percent = np.max(cpu_percents) - cpu_percent = np.mean(cpu_percents) - total_avg_cpu += cpu_percent - total_max_cpu += max_cpu_percent + if idx == 0: + cpu_percents = psu.cpu_percent(percpu=True) + max_cpu_percent = np.max(cpu_percents) + cpu_percent = np.mean(cpu_percents) + total_avg_cpu += cpu_percent + total_max_cpu += max_cpu_percent - total_packets_in_buf += source._cli.size + total_packets_in_buf += packet_source.buf_use if not no_viz and not short: - click.echo(f"{status_line} [{flags}] " - f"{source._cli.size:04d}/{source._cli.capacity} " - f"cpu:{cpu_percent:02.0f} ({max_cpu_percent:02.0f}) " - f"t:{scan_t:.04f}s") + sline = 
f"|{status_str(status_line[idx])}|" + sline += " " * int(max_packets_per_frame + 2 - len(sline)) + click.echo( + f"{idx:<2}: {sline} [{flags}] " + f"{packet_source.buf_use:04d} " + f"cpu:{cpu_percent:02.0f} ({max_cpu_percent:03.0f}) " + f"t:{scan_t:.04f}s") - status_line = status_line_init + status_line[idx] = status_line_init[idx] - last_scan_ts = now - frames_cnt += 1 + last_scan_ts[idx] = now + frames_cnt[idx] += 1 - if ((n_frames and frames_cnt >= n_frames) - or (n_seconds and now - start_ts >= n_seconds)): + if ((n_frames and frames_cnt[idx] >= n_frames) + or (n_seconds and now - start_ts[idx] >= n_seconds)): break except KeyboardInterrupt: click.echo("\nInterrupted") finally: - missed_packets_percent = 0 - if total_packets + missed_packets: - missed_packets_percent = 100 * missed_packets / (total_packets + - missed_packets) + missed_packets_percent = [0] * len(data_source.metadata) + ooo_packets_percent = [0] * len(data_source.metadata) + all_packets_cnt = sum(total_packets) + sum(missed_packets) + all_missed_packets_percent = 0 + all_ooo_packets_percent = 0 + + if all_packets_cnt: + all_missed_packets_percent = 100 * sum(missed_packets) / all_packets_cnt + missed_packets_percent = [ + (100 * mp / (tp + mp)) if tp + mp > 0 else 0 + for mp, tp in zip(missed_packets, total_packets) + ] + ooo_packets_percent = [ + (100 * oop / tp) if tp > 0 else 0 + for oop, tp in zip(ooo_packets, total_packets) + ] + + if sum(total_packets): + all_ooo_packets_percent = 100 * sum(ooo_packets) / sum(total_packets) + + total_packets_str = "" + frames_cnt_str = "" + if len(data_source.metadata) > 1: + total_packets_str = f", {total_packets}" + frames_cnt_str = f", {frames_cnt}" + + missed_packets_str = "" + if sum(missed_packets) and len(data_source.metadata) > 1: + missed_packets_str = ", ".join([ + f"{mp} ({mpp:.02f}%)" + for mp, mpp in zip(missed_packets, missed_packets_percent) + ]) + missed_packets_str = f", [{missed_packets_str}]" + + ooo_packets_str = "" + if sum(ooo_packets) and len(data_source.metadata) > 1: + ooo_packets_str = ", ".join([ + f"{op} ({opp:.02f}%)" + for op, opp in zip(ooo_packets, ooo_packets_percent) + ]) + ooo_packets_str = f", [{ooo_packets_str}]" + avg_cpu_load = 0 avg_max_cpu_load = 0 avg_packets_in_buf = 0 - if frames_cnt: - avg_cpu_load = total_avg_cpu / frames_cnt - avg_max_cpu_load = total_max_cpu / frames_cnt - avg_packets_in_buf = total_packets_in_buf / frames_cnt + total_frames_cnt = sum(frames_cnt) + if all(frames_cnt): + avg_cpu_load = total_avg_cpu / frames_cnt[0] + avg_max_cpu_load = total_max_cpu / frames_cnt[0] + avg_packets_in_buf = total_packets_in_buf / total_frames_cnt if not short: - click.echo(f"Summary: {info.prod_line}, {info.mode}, " - f"{info.format.udp_profile_lidar}, " - f"{info.format.column_window}, [{flags}]:") - click.echo(f" lidar_packets received: {total_packets}") - click.echo(f" lidar_packets missed : {missed_packets} " - f"({missed_packets_percent:.02f}%)") - click.echo(f" total frames : {frames_cnt}") - click.echo(f" avg packets in buf : {avg_packets_in_buf:.02f}" + click.echo(f"Summary [{flags}]:") + click.echo(" sensors:") + for idx, info in enumerate(data_source.metadata): + click.echo( + f" {idx:<5}: {info.prod_line}, {info.mode}, " + f"{info.format.udp_profile_lidar}, {info.format.column_window}" + ) + + click.echo(" lidar_packets:") + click.echo(f" received : {sum(total_packets)}" + f"{total_packets_str}") + + if not is_scan_source: + click.echo(f" out-of-order : {sum(ooo_packets)} " + f"({all_ooo_packets_percent:.02f}%)" + 
f"{ooo_packets_str}") + + click.echo( + f" missed : {sum(missed_packets)} " + f"({all_missed_packets_percent:.02f}%){missed_packets_str}") + + if hasattr(source, "id_error_count"): + if isinstance(source.id_error_count, list): + error_cnt_str = f"{sum(source.id_error_count)}" + if sum(source.id_error_count) and len(source.metadata) > 1: + error_cnt_str += f", {source.id_error_count}" + else: + error_cnt_str = f"{source.id_error_count}" + click.echo(f" id errors : {error_cnt_str}") + + click.echo(f" total frames : {total_frames_cnt}{frames_cnt_str}") + click.echo(f" avg packets in buf : {avg_packets_in_buf:.02f} " f"/ {source._cli.capacity}") click.echo(f" avg CPU loads : {avg_cpu_load:.02f}% " f"({avg_max_cpu_load:.02f}%)") - # one line summary for spreadsheets use - click.echo(f"-,{info.prod_line},{info.mode}," - f"{info.format.udp_profile_lidar}," - f"{flags}," - f"{total_packets}," - f"{missed_packets}," - f"{missed_packets_percent:.02f}," - f"{frames_cnt}," - f"{avg_packets_in_buf:.02f}," - f"{source._cli.capacity}," - f"{avg_cpu_load:.02f}," - f"{avg_max_cpu_load:.02f}") + # one line summary for spreadsheets use (only for single sensor) + if len(data_source.metadata) == 1: + info = data_source.metadata[0] + click.echo(f"-,{info.prod_line},{info.mode}," + f"{info.format.udp_profile_lidar}," + f"{flags}," + f"{sum(total_packets)}," + f"{sum(missed_packets)}," + f"{all_missed_packets_percent:.02f}," + f"{sum(frames_cnt)}," + f"{avg_packets_in_buf:.02f}," + f"{source._cli.capacity}," + f"{avg_cpu_load:.02f}," + f"{avg_max_cpu_load:.02f}") data_source.close() diff --git a/python/src/ouster/cli/plugins/discover.py b/python/src/ouster/cli/plugins/discover.py index a6cf6e82..a4845c71 100644 --- a/python/src/ouster/cli/plugins/discover.py +++ b/python/src/ouster/cli/plugins/discover.py @@ -6,23 +6,28 @@ This is adapted from zeroconf's async_browser.py example. """ -import socket -from typing import Optional +import json +import logging +from typing import Optional, Tuple import requests import time from socket import AddressFamily import asyncio import click from ouster.cli.core import cli -from ouster.client import get_config from zeroconf import IPVersion, ServiceStateChange, Zeroconf from concurrent.futures import ThreadPoolExecutor, as_completed +import zeroconf from zeroconf.asyncio import ( AsyncServiceBrowser, AsyncServiceInfo, AsyncZeroconf, ) from psutil import net_if_addrs +from sys import version_info +import importlib.metadata +import packaging.version +import ipaddress import sys host_interfaces = net_if_addrs() @@ -32,22 +37,25 @@ if address.family == AddressFamily.AF_INET or address.family == AddressFamily.AF_INET6 ] -text_columns = ["HOSTNAME", "ADDRESS", "MODEL", "UDP DESTINATION", "DEST. LIDAR PORT", "DEST. 
IMU PORT"] -text_column_widths = [30, 20, 16, 45, 20, 20] -mdns_services = ["_roger._tcp.local.", "_ouster-lidar._tcp.local."] rethrow_exceptions = False -def _get_config(server): - """ This function is for testing so we can monkeypatch it out """ - return get_config(server) - - class AsyncServiceDiscovery: - def __init__(self, ip_version: IPVersion, timeout: int, continuous: bool, pool_size: int = 10) -> None: + def __init__(self, interfaces, ip_version: IPVersion, output: str, timeout: int, + continuous: bool, show_user_data, + service_names=["_roger._tcp.local.", "_ouster-lidar._tcp.local."], + pool_size: int = 10, socket_timeout: int = 2) -> None: + + self.interfaces = interfaces + if type(interfaces) is not zeroconf.InterfaceChoice: + self.interfaces = list(interfaces) self.ip_version = ip_version + self.output = output self.timeout = timeout self.continuous = continuous + self.show_user_data = show_user_data + self.service_names = service_names + self.socket_timeout = socket_timeout self.aiobrowser: Optional[AsyncServiceBrowser] = None self.aiozc: Optional[AsyncZeroconf] = None self.start_time = time.time() @@ -56,24 +64,29 @@ def __init__(self, ip_version: IPVersion, timeout: int, continuous: bool, pool_s self._futures_shadow = [] self._processed_hostnames = [] self._lock = asyncio.Lock() + # Note - longer than the default of 3s, but it doesn't affect the overall duration. + self.async_request_timeout_ms = 10000 + self.shutdown = False async def async_run(self) -> None: - self.aiozc = AsyncZeroconf(ip_version=self.ip_version) + self.aiozc = AsyncZeroconf(self.interfaces, False, self.ip_version) self.aiobrowser = AsyncServiceBrowser( - self.aiozc.zeroconf, mdns_services, handlers=[self.async_on_service_state_change] + self.aiozc.zeroconf, self.service_names, handlers=[self.async_on_service_state_change] ) - while True: + results = [] + while not self.shutdown: await asyncio.sleep(0.5) async with self._lock: self._futures = self._futures_shadow self._futures_shadow = [] for future in as_completed(self._futures): - strs, color, error = future.result() - if error is not None: - click.echo(error) - click.echo(click.style(''.join(strs), fg=color)) + result = future.result() + results.append(result) + if self.output == 'text': + text, color = get_output_for_sensor(result) + click.secho(text, color=color) self._futures = [] if not self.continuous and self.timeout: @@ -82,9 +95,12 @@ async def async_run(self) -> None: self._executor.shutdown() else: self._executor.shutdown(cancel_futures=True) - return + self.shutdown = True + if self.output == 'json': + print(json.dumps(results, indent=2)) async def async_close(self) -> None: + self.shutdown = True assert self.aiozc is not None assert self.aiobrowser is not None await self.aiobrowser.async_cancel() @@ -92,124 +108,230 @@ async def async_close(self) -> None: def async_on_service_state_change(self, zeroconf: Zeroconf, service_type: str, name: str, state_change: ServiceStateChange) -> None: - if state_change is not ServiceStateChange.Added: + if self.shutdown or state_change is not ServiceStateChange.Added: return # TODO: handle other state changes asyncio.ensure_future(self.async_display_service_info(zeroconf, service_type, name)) + async def create_future_task_for_info(self, info): + async with self._lock: + if not self.shutdown and info.server not in self._processed_hostnames: + self._processed_hostnames.append(info.server) + f = self._executor.submit(get_all_sensor_info, info, self.socket_timeout, self.show_user_data) + 
self._futures_shadow.append(f) + async def async_display_service_info(self, zeroconf: Zeroconf, service_type: str, name: str) -> None: + # try to get service info info = AsyncServiceInfo(service_type, name) - await info.async_request(zeroconf, 1000) - if info and info.server: - async with self._lock: - if info.server not in self._processed_hostnames: - self._processed_hostnames.append(info.server) - f = self._executor.submit(service_info_as_text_str, info) - self._futures_shadow.append(f) + await info.async_request(zeroconf, self.async_request_timeout_ms) + # submit a task to obtain metadata, etc + await self.create_future_task_for_info(info) -def address_bytes_to_ip_str(b: bytes) -> str: - return socket.inet_ntop( - socket.AF_INET6 if len(b) == 16 else socket.AF_INET, b - ) +def parse_scope_id(address: str) -> Tuple[str, Optional[int]]: + addr_str, _, scope_str = address.partition('%') + scope_id = int(scope_str) if scope_str else None + return (addr_str, scope_id) -def get_address(info) -> str: - addresses = [] - for address in info.dns_addresses(): - address = address.address - try: - addr_type = socket.AF_INET6 if len(address) == 16 else socket.AF_INET +def is_link_local_ipv6_address_and_missing_scope_id(address: str) -> bool: + address, scope_id = parse_scope_id(address) + if scope_id: + return False + ip_addr = ipaddress.ip_address(address) + return type(ip_addr) is ipaddress.IPv6Address and ip_addr.is_link_local - addr_string = socket.inet_ntop(addr_type, address) - addresses.append((addr_string, addr_type)) - except IndexError: - pass - addresses.append((info.server, socket.AF_INET)) +def get_text_for_oserror(error_prefix: str, address: str, e: Exception) -> str: + if "invalid argument" in str(e).lower() and is_link_local_ipv6_address_and_missing_scope_id(address): + zeroconf_version = packaging.version.parse(importlib.metadata.version('zeroconf')) + if version_info < (3, 9): + return f"{error_prefix} - this version of Python does not support scoped \ +link-local IPv6 addresses, which are necessary to retrieve the sensor configuration." 
+ elif zeroconf_version < packaging.version.parse('0.131.0'): + return f"{error_prefix} - the installed version of zeroconf ({zeroconf_version}) \ +may not be able to provide scoped link-local IPv6 addresses, \ +which are necessary to retrieve the sensor configuration.\n" \ + + "Please refer to this GitHub pull request for specifics: \ +https://github.com/python-zeroconf/python-zeroconf/pull/1322" + else: + return f"{error_prefix} - {e}" + else: + return f"{error_prefix} - {e}" - for addr_string, addr_type in addresses: - s = socket.socket(addr_type, socket.SOCK_STREAM) - result = -1 - try: - if addr_type == socket.AF_INET6: - result = s.connect_ex((addr_string, 80, 0, 0)) - addr_string = f"[{addr_string}]" - else: - result = s.connect_ex((addr_string, 80)) - except socket.gaierror: - pass - finally: - s.close() - - if not result: - return addr_string - - return None - - -def service_info_as_text_str(info) -> str: - addresses = info.dns_addresses() - ip_addr_string = '-' - prod_line = '-' - udp_dest = '-' - udp_port_lidar = '-' - udp_port_imu = '-' - error = None - color = 'white' +def format_hostname_for_url(hostname_str: str) -> str: + # if it's an IPv6 address it must be formatted try: - ip_addr_string = '-' - try: - first_address = addresses[0] - ip_addr_string = address_bytes_to_ip_str(first_address.address) - except IndexError: - pass - server_addr = get_address(info) - if server_addr is None: - color = 'bright_yellow' - raise Exception(f"Can't Connect To Sensor: Hostname: {info.server} Addresses: {addresses}") - url = f"http://{server_addr}/api/v1/sensor/metadata/sensor_info" - response = requests.get(url) - response_json = response.json() - prod_line = response_json.get('prod_line', prod_line) - config = _get_config(server_addr) - if config: - if config.udp_dest: - udp_dest = config.udp_dest - udp_port_lidar = str(config.udp_port_lidar) - udp_port_imu = str(config.udp_port_imu) - except Exception as e: - if rethrow_exceptions: - raise - else: - error = click.style(e, fg='yellow') + ip_addr = ipaddress.ip_address(hostname_str) + if type(ip_addr) is ipaddress.IPv6Address: + return f"[{str(ip_addr)}]" + return str(ip_addr) + except ValueError: + # if it's not an ip address, let's assume it's a hostname + return hostname_str + + +def get_sensor_info(hostname_or_address, socket_timeout): + url = f"http://{format_hostname_for_url(hostname_or_address)}/api/v1/sensor/metadata/sensor_info" + response = requests.get(url, timeout=socket_timeout) + return response.json() + + +def get_sensor_config(hostname_or_address, socket_timeout): + url = f"http://{format_hostname_for_url(hostname_or_address)}/api/v1/sensor/cmd/get_config_param?args=active" + response = requests.get(url, timeout=socket_timeout) + if response.status_code != 200: + return None + return response.json() + + +def get_sensor_network(hostname_or_address, socket_timeout): + url = f"http://{format_hostname_for_url(hostname_or_address)}/api/v1/system/network" + response = requests.get(url, timeout=socket_timeout) + return response.json() + + +def get_sensor_user_data(hostname_or_address, socket_timeout): + url = f"http://{format_hostname_for_url(hostname_or_address)}/api/v1/user/data" + response = requests.get(url, timeout=socket_timeout) + if response.status_code != 200: + return None + return response.json() + + +def get_output_for_sensor(sensor): + undefined_value = '-' + unknown = 'UNKNOWN' + sensor_hostname = sensor.get('hostname', undefined_value) + prod_line = unknown + udp_dest = undefined_value + udp_port_lidar = 
undefined_value + udp_port_imu = undefined_value + ipv4_address = None + ipv4_link_local = None + ipv6_address = None + ipv6_link_local = None + # TODO: decide if we need to display the addresses obtained via mDNS. + # (Probably not.) + # addresses = sensor.get('addresses') + sensor_info = sensor.get('sensor_info') + config = sensor.get('active_config') + network = sensor.get('network') + user_data = sensor.get('user_data') + firmware = sensor_info.get('image_rev', undefined_value) if sensor_info else undefined_value + sn = sensor_info.get('prod_sn', unknown) if sensor_info else unknown + warnings = sensor.get('warnings') + color = 'white' + if sensor_info: + prod_line = sensor_info.get('prod_line', unknown) + if config: + udp_dest = config.get('udp_dest', undefined_value) + udp_port_lidar = config.get('udp_port_lidar', undefined_value) + udp_port_imu = config.get('udp_port_imu', undefined_value) + if network: + + ipv4 = network.get('ipv4') + ipv4_address_type = 'static' if ipv4.get('override') else 'DHCP' + ipv4_address = ipv4.get('override') or ipv4.get('addr') + ipv4_link_local = ipv4.get('link_local') + + ipv6 = network.get('ipv6') + ipv6_address_type = 'static' if ipv6.get('override') else 'DHCP' + ipv6_address = ipv6.get('override') or ipv6.get('addr') + ipv6_link_local = ipv6.get('link_local') if udp_dest in host_addresses: color = 'green' - strs = [info.server, ip_addr_string, prod_line, udp_dest, udp_port_lidar, udp_port_imu] - for i in range(len(strs)): - strs[i] = strs[i].ljust(text_column_widths[i]) - return ''.join(strs), color, error + out = f"{prod_line} - {sn}\n* hostname: {sensor_hostname}\n" + if warnings: + out += click.style("* warnings:\n", fg='yellow') + for warning in warnings: + out += click.style(f" * {warning}\n", fg='yellow') + out += f"* firmware: {firmware}\n* addresses:\n" + if ipv4_address: + out += f" * IPv4 {ipv4_address_type} {ipv4_address}\n" + if ipv4_link_local: + out += f" * IPv4 link-local {ipv4_link_local}\n" + if ipv6_address: + out += f" * IPv6 {ipv6_address_type} {ipv6_address}\n" + if ipv6_link_local: + out += f" * IPv6 link-local {ipv6_link_local}\n" + out += f"* UDP destination address: {udp_dest}\n" + out += f"* UDP port lidar, IMU: {udp_port_lidar}, {udp_port_imu}\n" + if user_data: + out += f"* user data: {user_data}\n" + return out, color + + +def get_all_sensor_info(info, socket_timeout, show_user_data) -> str: + addresses = info.parsed_scoped_addresses(IPVersion.All) + sensor_info = None + config = None + network = None + user_data = None + warnings = [] + for address in addresses: + try: + if not sensor_info: + sensor_info = get_sensor_info(address, socket_timeout) + if not config: + config = get_sensor_config(address, socket_timeout) + if not network: + network = get_sensor_network(address, socket_timeout) + if show_user_data and not user_data: + user_data = get_sensor_user_data(address, socket_timeout) + except OSError as e: + warning_prefix = f"Could not connect to {info.server} via {address}" + warnings.append( + get_text_for_oserror( + warning_prefix, + address, + e + ) + ) + return { + 'hostname': info.server, + 'addresses': addresses, + 'active_config': config, + 'sensor_info': sensor_info, + 'network': network, + 'user_data': user_data, + 'warnings': warnings, + } @cli.command() -@click.option('-t', '--timeout', help='Run for the specified number of seconds', default=5, type=int) +@click.option('-o', '--output', help='Provide the output in the specified format', + type=click.Choice(['text', 'json'], case_sensitive=False), 
default='text') +@click.option('-t', '--timeout', help='Run for the specified number of seconds', default=10, type=int) @click.option('-c', '--continuous', help='Run continuously', is_flag=True, default=False) +@click.option('--http-timeout', help='The timeout for HTTP requests for sensor info', default=2, type=int) +@click.option('--interface', help="Address(es) of interface(s) to listen for mDNS services.", default=[], multiple=True) +@click.option('-u', '--show-user-data', help="Display sensor user data if defined.", is_flag=True, default=False) @click.pass_context -def discover(ctx, timeout, continuous): +def discover(ctx, output, timeout, continuous, http_timeout, interface, show_user_data): """Perform a one-time or continuous network search for Ouster sensors. """ global rethrow_exceptions + if output == 'json' and continuous: + click.secho("Sorry, ouster-cli cannot produce JSON output when running continuously.", fg='yellow', err=True) + sys.exit(1) rethrow_exceptions = ctx.obj.get('TRACEBACK', False) - strs = text_columns - # TODO: extract a text row fmt method - for i in range(len(strs)): - strs[i] = strs[i].ljust(text_column_widths[i]) - click.echo(''.join(strs)) loop = asyncio.get_event_loop() - runner = AsyncServiceDiscovery(IPVersion.V4Only, timeout, continuous) + if not interface: + interface = zeroconf.InterfaceChoice.All + logging.getLogger('zeroconf').propagate = False + runner = AsyncServiceDiscovery( + interface, + IPVersion.All, + output, + timeout, + continuous, + show_user_data, + socket_timeout=http_timeout + ) try: loop.run_until_complete(runner.async_run()) except KeyboardInterrupt: diff --git a/python/src/ouster/cli/plugins/io_type.py b/python/src/ouster/cli/plugins/io_type.py deleted file mode 100644 index 8bd36458..00000000 --- a/python/src/ouster/cli/plugins/io_type.py +++ /dev/null @@ -1,112 +0,0 @@ -import socket -import os -from enum import Enum -from typing import Optional - - -class OusterIoType(Enum): - SENSOR = 1 - PCAP = 2 - OSF = 3 - ROSBAG = 4 - CSV = 5 - PLY = 6 - PCD = 7 - LAS = 8 - - def __str__(self): - if self.value == 1: - return "SENSOR" - if self.value == 2: - return "PCAP" - if self.value == 3: - return "OSF" - if self.value == 4: - return "ROSBAG" - if self.value == 5: - return "CSV" - if self.value == 6: - return "PLY" - if self.value == 7: - return "PCD" - if self.value == 8: - return "LAS" - return "UNKNOWN" - - -def extension_from_io_type(source: OusterIoType) -> Optional[str]: - """Return a file extension for the given source type, if it's a file-based source.""" - if source == OusterIoType.PCAP: - return '.pcap' - elif source == OusterIoType.OSF: - return '.osf' - elif source == OusterIoType.ROSBAG: - return '.bag' - elif source == OusterIoType.CSV: - return '.csv' - elif source == OusterIoType.PLY: - return '.ply' - elif source == OusterIoType.PCD: - return '.pcd' - elif source == OusterIoType.LAS: - return '.las' - return None - - -def io_type_from_extension(source: str) -> OusterIoType: - """Return an OusterIoType given the file extension for the provided file path""" - source_lower = source.lower() - if source_lower.endswith('.pcap'): - return OusterIoType.PCAP - elif source_lower.endswith('.osf'): - return OusterIoType.OSF - elif source_lower.endswith('.bag'): - return OusterIoType.ROSBAG - elif source_lower.endswith('.csv'): - return OusterIoType.CSV - elif source_lower.endswith('.ply'): - return OusterIoType.PLY - elif source_lower.endswith('.pcd'): - return OusterIoType.PCD - elif source_lower.endswith('.las'): - return 
OusterIoType.LAS - else: - raise ValueError('Expecting .pcap, .osf, .bag, .ply, .pcd, .las or .csv.') - - -def io_type_from_magic(source: str) -> Optional[OusterIoType]: - """Try to return an OusterIoType given a file path, using python-magic""" - try: - import magic - # Note - python-magic doesn't know what .osf or .bag files are. - type_wizard = magic.from_file(os.path.realpath(source)) - if "pcap capture file" in type_wizard: - return OusterIoType.PCAP - elif "Point Cloud Data" in type_wizard: - return OusterIoType.PCD - elif "LIDAR point data records" in type_wizard: - return OusterIoType.LAS - elif "CSV text" in type_wizard: - return OusterIoType.CSV - - except ImportError: - pass - return None - - -def io_type(source: str) -> OusterIoType: - """Return a OusterIoType given a source arg str""" - if os.path.isfile(source): - magic_type = io_type_from_magic(source) - if magic_type: - return magic_type - io_type = io_type_from_extension(source) - return io_type - try: - if socket.gethostbyname(source): - return OusterIoType.SENSOR - except Exception: - pass - - raise ValueError("Source type expected to be a sensor hostname, ip address," - " or a .pcap, .osf, or .bag file.") diff --git a/python/src/ouster/cli/plugins/source.py b/python/src/ouster/cli/plugins/source.py index 41b66f45..093287ff 100644 --- a/python/src/ouster/cli/plugins/source.py +++ b/python/src/ouster/cli/plugins/source.py @@ -1,344 +1,46 @@ # type: ignore import click +import re +import threading +import numpy as np +from itertools import islice from ouster.cli.core import cli from ouster.cli.core.cli_args import CliArgs from ouster.cli.core.util import click_ro_file -import ouster.cli.core.pcap -import ouster.cli.core.sensor -from typing import List, Optional -from .io_type import extension_from_io_type, io_type_from_extension, io_type, OusterIoType +from ouster.sdk import open_source +from ouster.sdk.client.core import ClientTimeout +from ouster.sdk.io_type import (extension_from_io_type, io_type, OusterIoType) +import ouster.cli.plugins.source_pcap as pcap_cli +import ouster.cli.plugins.source_osf as osf_cli +import ouster.cli.plugins.source_sensor as sensor_cli +from typing import (List, Optional, Iterable, Tuple, Union) +from .source_save import SourceSaveCommand +from .source_util import (CoupledTee, + SourceCommandContext, + SourceCommandCallback, + SourceCommandType, + source_multicommand, + _join_with_conjunction) _source_arg_name: str = 'source' -_output_file_arg_name: str = 'output_file' - - -def _join_with_conjunction(things_to_join: List[str], separator: str = ', ', conjunction: str = 'or') -> str: - """Given a list of things, return a string like - 'Thing A, Thing B, or Thing C' - """ - strings = [str(x) for x in things_to_join] - if len(strings) > 1: - strings[-1] = conjunction + " " + strings[-1] - if len(strings) == 2: - return ' '.join(strings) - return separator.join(strings) @click.command() -@click.argument('keyval', metavar='[KEY VAL]...', type=str, nargs=-1) -@click.option('-d', 'dump', is_flag=True, help='Dump current configuration') -@click.option('-c', 'file', type=click.File(), help='Read config from file') -@click.option('-u', 'auto', is_flag=True, help='Set automatic udp dest') -@click.option('-p', 'persist', is_flag=True, help='Persist configuration') -@click.option('-s/-n', 'standby', default=None, help='Set STANDBY or NORMAL') -@click.pass_context -def sensor_config(ctx, *args, **kwargs) -> None: - """ - Manipulate the sensor configuration. 
- - Update the sensor configuration or dump it to stdout. The first positional - argument is the sensor hostname; remaining arguments are interpreted as - config parameter key/value pairs, for example: - - \b - $ ouster-cli sensor config os-99xxxxxxxxxx \\ - lidar_mode 2048x10 azimuth_window "[20000, 60000]" - - If no options or config param values are specified, use the default UDP - ports, automatic UDP destination, full azimuth azimuth window, and set the - operating mode to NORMAL. - """ - # Implements ouster-cli source config - source = ctx.obj.get(_source_arg_name) - kwargs['hostname'] = source - # TODO refactor - ctx.forward(ouster.cli.core.sensor.config, *args, **kwargs) - - -@click.command -@click.option('--legacy/--non-legacy', - default=False, - help="Use legacy metadata format or not") -@click.pass_context -def sensor_info(ctx, *args, **kwargs) -> None: - """Retrieve the sensor metadata""" # Implements ouster-cli source metadata - source = ctx.obj.get(_source_arg_name) - kwargs['hostname'] = source - # TODO refactor - ctx.forward(ouster.cli.core.sensor.metadata, *args, **kwargs) - - -@click.command -@click.option('-l', '--lidar-port', type=int, default=None, help="Lidar Port") -@click.option('-i', '--imu-port', type=int, default=None, help="default: IMU Port") -@click.option('-n', '--n-frames', type=int, help="number of lidar frames") -@click.option('-s', '--n-seconds', default=0.0, help="max time to record") -@click.option('--chunk-size', default=0, help="split output by size (MB)") -@click.option('-b', '--buf-size', default=640, hidden=True, help="Max packets to buffer") -@click.option('-t', '--timeout', default=1.0, help="Seconds to wait for data") -@click.option('-p', '--prefix', default="", help="Recorded file name prefix") -@click.option('--viz', default=False, is_flag=True, help="Visualize point cloud during recording") -@click.option('--legacy/--non-legacy', - default=False, - help="Use legacy metadata format or not") -@click.option('-x', '--do-not-reinitialize', is_flag=True, default=False, - help="Do not reinitialize (by default it will reinitialize if needed)") -@click.option('-y', '--no-auto-udp-dest', is_flag=True, default=False, - help="Do not automatically set udp_dest (by default it will auto set udp_dest") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -@click.pass_context -def sensor_record(ctx, *args, **kwargs) -> None: - """Record a sensor data stream as PCAP""" # Implements ouster-cli source record - source = ctx.obj.get(_source_arg_name) - kwargs['hostname'] = source - # TODO refactor - ctx.forward(ouster.cli.core.pcap.pcap_record, *args, **kwargs) - - -@click.command -@click.option('-b', '--buf-size', default=256, hidden=True, help="Max packets to buffer") -@click.option('-e', '--extrinsics', type=float, nargs=16, - help='Lidar sensor extrinsics to use in viz') -@click.option('-m', '--meta', help="Provide separate metadata to use with sensor", - type=click_ro_file, hidden=True) -@click.option('-F', '--filter', is_flag=True, help="Drop scans missing data") -@click.option('-l', 
'--lidar-port', type=int, default=None, help="Lidar port") -@click.option('-s', '--soft-id-check', is_flag=True, hidden=True, - help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa -@click.option('-t', '--timeout', default=1.0, help="Seconds to wait for data") -@click.option('-v', '--verbose', is_flag=True, help="Print some debug output") -@click.option('-x', '--do-not-reinitialize', is_flag=True, default=False, - help="Do not reinitialize (by default it will reinitialize if needed)") -@click.option('-y', '--no-auto-udp-dest', is_flag=True, default=False, - help="Do not automatically set udp_dest (by default it will auto set udp_dest") -@click.option('--extrinsics', - type=float, - required=False, - nargs=16, - help='Lidar sensor extrinsics to use in viz') -@click.option('--soft-id-check', - is_flag=True, - hidden=True, - help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -@click.pass_context -def sensor_viz(ctx, *args, **kwargs) -> None: - """Visualize the sensor data in a 3D viewer""" # Implements ouster-cli source viz - source = ctx.obj.get(_source_arg_name) - kwargs['hostname'] = source - # TODO refactor - ctx.forward(ouster.cli.core.sensor.viz, *args, **kwargs) - - -@click.command -@click.argument(_output_file_arg_name, required=True) -@click.option('-m', '--meta', required=False, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('-l', '--lidar-port', default=None, type=int, help="Dest port of lidar data") -@click.option('-i', '--imu-port', default=None, type=int, help="Dest port of imu data") -@click.option('-o', '--output', required=False, help="BAG output filename") -@click.option('--soft-id-check', - is_flag=True, - help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa -@click.pass_context -def bag_from_pcap(ctx, output_file, meta, lidar_port, imu_port, output, soft_id_check) -> None: - """Convert the source PCAP to Rosbag""" - # Implements ouster-cli source .pcap convert .bag - source = ctx.obj.get(_source_arg_name) - return ouster.cli.core.pcap.pcap_to_bag_impl(source, meta, lidar_port, imu_port, output_file, soft_id_check) - - -@click.command -@click.argument(_output_file_arg_name, required=True) -@click.option('-m', '--meta', type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('--start-index', default=0, help="index of scan to start outputting") -@click.option('--num-scans', default=1, help="number of scans to save from pcap to csv files") -@click.pass_context -def csv_from_pcap(ctx, - output_file: str, - meta: Optional[str], - start_index: Optional[int], - num_scans: Optional[int]) -> None: - """Convert the source PCAP to CSV""" - - source = ctx.obj.get(_source_arg_name) - if num_scans is not None and num_scans > 1: - click.echo("INFO: You've selected to output multiple scans. 
" - "Your output CSV names will be suffixed with index.") - csv_base = output_file[0:-4] - output_paths = [f'{csv_base}_{idx:06d}.csv' for idx in range(0, num_scans)] - else: - output_paths = [output_file] - - return ouster.cli.core.pcap.pcap_to_csv_impl(source, meta, start_index, num_scans, output_paths) - - -class SourceConvertCommand(click.Command): - """Generalizes ouster-cli source convert - """ - def __init__(self, *args, **kwargs): - kwargs['add_help_option'] = False - super().__init__(*args, **kwargs) - click.argument(_output_file_arg_name, required=True)(self) - - def get_output_type_file_extensions_str(self): - exts = sorted( - [extension_from_io_type(source_type) for source_type in self.conversions.keys()] - ) - # TODO: hack remove with OSF is allowed - if '.osf' in exts: - exts.remove('.osf') - return _join_with_conjunction(exts) - - def invoke(self, ctx): - output_type_file_extensions = self.get_output_type_file_extensions_str() - file_extension_err_text =\ - f"Expected {_output_file_arg_name.upper()} extension to be {output_type_file_extensions}" - - try: - output_file = ctx.params.get(_output_file_arg_name) - output_type = io_type_from_extension(output_file) - except (KeyError, ValueError): - if CliArgs().has_any_of(ctx.help_option_names): - # only print output_file if there's a sample output_file type - # TODO: remove hack on length and fix Borg - click.echo(self.get_help(ctx)) - if len(CliArgs().args) > 4: - click.echo(f"\nERROR: {file_extension_err_text}") - return - raise click.exceptions.UsageError(file_extension_err_text) - try: - convert_command = self.conversions[output_type] - if CliArgs().has_any_of(ctx.help_option_names): - click.echo(convert_command.get_help(ctx)) - else: - convert_command.parse_args(ctx, [output_file] + ctx.args) - try: - ctx.forward(convert_command, *ctx.args) - except TypeError: - if len(ctx.args) > 0: - raise ouster.cli.core.SourceArgsException(ctx) - except KeyError: - raise click.exceptions.UsageError(file_extension_err_text) - - -class PcapConvertCommand(SourceConvertCommand): - """Implements ouster-cli source .pcap convert , - """ - def __init__(self, *args, **kwargs): - kwargs['help'] = f"Convert from PCAP to {self.get_output_type_file_extensions_str()}" - super().__init__(*args, **kwargs) - # this is a map from output type to a conversion function - conversions = { - # OusterIoType.ROSBAG: bag_from_pcap, - OusterIoType.CSV: csv_from_pcap, - } - - -@click.command -@click.option('-n', type=int, default=0, help="Read only INTEGER packets.") -@click.pass_context -def pcap_info(ctx, *args, **kwargs) -> None: - """Display info about the PCAP file""" - source = ctx.obj.get(_source_arg_name) - kwargs['file'] = source - ctx.forward(ouster.cli.core.pcap.pcap_info, *args, **kwargs) - - -@click.command -@click.argument('output') -@click.option('-s', '--start-frame', default=0, help="Start frame index") -@click.option('-n', '--num-frames', default=10, help="Number of frames") -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -@click.option('-l', '--lidar-port', default=None, - type=int, help="Dest. port of lidar data") -@click.option('-i', '--imu-port', type=int, default=None, help="Dest. 
port of imu data") -@click.option('--soft-id-check', - is_flag=True, - help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa -@click.pass_context -def pcap_slice(ctx, *args, **kwargs) -> None: - """Writes a portion of the input PCAP file to a new file""" - source = ctx.obj.get(_source_arg_name) - kwargs['file'] = source - ctx.forward(ouster.cli.core.pcap.pcap_slice, *args, **kwargs) - - -@click.command -@click.option('-m', '--meta', required=False, type=click_ro_file, - help="Metadata for PCAP, helpful if automatic metadata resolution fails") -# TWS 20230627: '--cycle' is a deprecated option and only hidden to prevent breaking scripts that may be using it -@click.option('-c', '--cycle', is_flag=True, help="Loop playback", hidden=True) -@click.option('-e', '--on-eof', default='loop', type=click.Choice(['loop', 'stop', 'exit']), - help="Loop, stop, or exit after reaching end of file") -@click.option('-l', '--lidar-port', default=None, type=int, help="Dest. port of lidar data") -@click.option('-i', '--imu-port', default=None, help="Dest. port of imu data") -@click.option('-F', '--filter', is_flag=True, help="Drop scans missing data") -@click.option('-b', '--buf', default=50, help="Scans to buffer for stepping.") +@click.option("-p", "--pause", is_flag=True, help="Pause at first lidar scan") +@click.option("-e", "--on-eof", default='loop', type=click.Choice(['loop', 'stop', 'exit']), + help="Loop, stop or exit after reaching end of file") @click.option('-r', '--rate', - default=1.0, - help="Playback rate. One of 0.25, 0.5, 0.75, 1.0, 1.5, 2.0, 3.0") -@click.option('--extrinsics', - type=float, - required=False, - nargs=16, - help='Lidar sensor extrinsics to use in viz') -@click.option('--soft-id-check', - is_flag=True, - help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa -@click.option("-p", "--pause", is_flag=True, help="Pause after the first scan") + default="1", + help="Playback rate.", + type=click.Choice(["0.25", "0.5", "0.75", "1", "1.5", "2", "3"])) @click.option("--pause-at", default=-1, - help="Lidar Scan number to pause") -@click.option('--multi', - is_flag=True, - help='Turn on multi sensor pcap handling and metadata resolutions') -@click.option('--timeout', - type=float, - default=10.0, - help="Timeout in seconds, after which the script will terminate " - "if no lidar data is encountered in the PCAP file") -@click.option('--kitti-poses', - required=False, - type=click_ro_file, - help="Poses file in Kitti format, one pose per scan " - "(can be generated by kiss-icp)") + help="Lidar Scan number to pause at") +@click.option("--accum-num", + default=0, + help="Integer number of scans to accumulate") @click.option("--accum-num", default=0, help="Integer number of scans to accumulate") @@ -357,24 +59,86 @@ def pcap_slice(ctx, *args, **kwargs) -> None: default=0.001, help="Ratio of random points of every scan to add to an overall map") @click.pass_context -def pcap_viz(ctx, *args, **kwargs) -> None: - """Visualize the PCAP data in a 3D viewer""" - source = ctx.obj.get(_source_arg_name) - kwargs['file'] = source - # TODO refactor - ctx.forward(ouster.cli.core.pcap.pcap_viz, *args, **kwargs) - +@source_multicommand(type=SourceCommandType.CONSUMER) +def source_viz(ctx: SourceCommandContext, pause: bool, on_eof: str, pause_at: int, accum_num: int, + accum_every: Optional[int], accum_every_m: Optional[float], + accum_map: bool, accum_map_ratio: float, rate: str) -> SourceCommandCallback: + """Visualize LidarScans 
in a 3D viewer.""" + try: + from ouster.sdk.viz import SimpleViz, scans_accum_for_cli + except ImportError as e: + raise click.ClickException(str(e)) + + # ugly workarounds ensue + if on_eof == 'loop': + source = ctx.scan_source + from ouster.sdk.client import ScanSourceAdapter + if isinstance(source, ScanSourceAdapter): + source = source._scan_source + # NOTE: setting it here instead of at open_source stage because we do not want to propagate + # the flag up to `source` command + source._cycle = True + + if pause and pause_at == -1: + pause_at = 0 + + ctx.scan_iter, scans = CoupledTee.tee(ctx.scan_iter, + terminate=ctx.terminate_evt) + metadata = ctx.scan_source.metadata + scans_accum = scans_accum_for_cli(metadata, + accum_num=accum_num, + accum_every=accum_every, + accum_every_m=accum_every_m, + accum_map=accum_map, + accum_map_ratio=accum_map_ratio) + + def viz_thread_fn(): + sv = SimpleViz(metadata, scans_accum=scans_accum, + rate=float(rate), pause_at=pause_at, on_eof=on_eof) + sv.run(scans) + ctx.terminate_evt.set() + + if ctx.main_thread_fn is not None: + raise RuntimeError( + "A main-thread required function has already been set.") + ctx.main_thread_fn = viz_thread_fn + + +def extract_slice_indices(click_ctx: Optional[click.core.Context], + param: Optional[click.core.Argument], value: str): + """Validate and extract slice indices of the form [start]:[stop][:step].""" + index_matches = re.findall(r"^(-?\d*):(-?\d*):?(-?\d*)$", value) # noqa: W605 + + if not index_matches or len(index_matches[0]) != 3: + raise click.exceptions.BadParameter( + "slice indices must be of the form [start]:[stop][:step]") + parsed_indices = [int(i) if i != "" else None for i in index_matches[0]] + start, stop, step = parsed_indices[0], parsed_indices[1], parsed_indices[2] + # Check that indices are non-negative + if any(i < 0 if i is not None else False for i in parsed_indices): + raise click.exceptions.BadParameter( + "slice indices must be non-negative") + # Check that stop > start + if (stop is not None) and (not stop > start): + raise click.exceptions.BadParameter( + "slice stop index must be greater than start") + # Check that step > 1 + if (step is not None) and (not step > 0): + raise click.exceptions.BadParameter( + "slice step index must be greater than 0") + + return start, stop, step -class BagConvertCommand(SourceConvertCommand): - """Implements - ouster-cli source .bag convert - This method delegates to the appropriate command depending on the file - extension of the output file argument. - """ - conversions = { - # TODO - } +@click.command() +@click.argument('indices', required=True, callback=extract_slice_indices) +@click.pass_context +@source_multicommand(type=SourceCommandType.PROCESSOR) +def source_slice(ctx: SourceCommandContext, + indices: Tuple[Optional[int]]) -> SourceCommandCallback: + """Slice LidarScans streamed from SOURCE. 
Use the form [start]:[stop][:step].""" + start, stop, step = indices + ctx.scan_iter = islice(ctx.scan_iter, start, stop, step) class SourceMultiCommand(click.MultiCommand): @@ -387,68 +151,81 @@ class SourceMultiCommand(click.MultiCommand): def __init__(self, *args, **kwargs): kwargs['no_args_is_help'] = True + super().__init__(*args, **kwargs) self.commands = { + 'ANY': { + 'viz': source_viz, + 'slice': source_slice, + }, OusterIoType.SENSOR: { - 'config': sensor_config, - 'metadata': sensor_info, - 'record': sensor_record, - 'viz': sensor_viz, + 'config': sensor_cli.sensor_config, + 'metadata': sensor_cli.sensor_metadata, + 'save': SourceSaveCommand('save', context_settings=dict(ignore_unknown_options=True, + allow_extra_args=True)), }, OusterIoType.PCAP: { - # TODO SW-4407 not MVP - 'convert': PcapConvertCommand('convert', - context_settings=dict(ignore_unknown_options=True, allow_extra_args=True)), - 'info': pcap_info, - 'slice': pcap_slice, - 'viz': pcap_viz, - }, - # TODO SW-4407 not MVP - OusterIoType.ROSBAG: { - 'convert': BagConvertCommand('convert', hidden=True, - context_settings=dict(ignore_unknown_options=True, allow_extra_args=True)), + 'info': pcap_cli.pcap_info, + 'save': SourceSaveCommand('save', context_settings=dict(ignore_unknown_options=True, + allow_extra_args=True)), }, + OusterIoType.OSF: { + 'dump': osf_cli.osf_dump, + 'info': osf_cli.osf_info, + 'metadata': osf_cli.osf_metadata, + 'parse': osf_cli.osf_parse, + 'save': SourceSaveCommand('save', context_settings=dict(ignore_unknown_options=True, + allow_extra_args=True)), + } } + def get_supported_source_types(self): + return [iotype for iotype in self.commands.keys() if isinstance(iotype, OusterIoType)] + def get_source_file_extension_str(self): exts = sorted( - [extension_from_io_type(source_type) - for source_type in self.commands.keys() if extension_from_io_type(source_type)] + [extension_from_io_type(src_type) + for src_type in self.commands.keys() if extension_from_io_type(src_type)] ) - # TODO: remove hack when bag is introduced - exts.remove('.bag') - return _join_with_conjunction(exts) - def list_commands(self, ctx): + def list_commands(self, click_ctx: click.core.Context): """Get the source type from the click context and return the list of appropriate sub command names""" - source = ctx.params.get(_source_arg_name) - file_extensions_str = self.get_source_file_extension_str() - if not source and CliArgs().has_any_of(ctx.help_option_names): - # TODO comment out since it repeats - need to clean this up sometime - # click.echo(ctx.get_usage()) - # click.echo(f"\nERROR: Please specify a {_source_arg_name.upper()}, - # which should be a " f"sensor hostname, or a {file_extensions_str} file.") - command_dict_list = [] - for source_type in self.commands.keys(): - command_dict_list = command_dict_list + [{ f"{str(source_type)} {inner_command}":self.commands[source_type][inner_command] # noqa - for inner_command in self.commands[source_type].keys()}] - - all_command_dict = {} - for command_dict in command_dict_list: - all_command_dict.update(command_dict) - - return all_command_dict + source = click_ctx.params.get(_source_arg_name) + + if not source and CliArgs().has_any_of(click_ctx.help_option_names): + # Build a map from command name to command + command_to_types = {} + for src_type in self.commands.keys(): + for command_name in self.commands[src_type].keys(): + if command_name not in command_to_types: + command_to_types[command_name] = {} + if src_type == "ANY": + for supported_src_type in 
self.get_supported_source_types(): + command_to_types[command_name][supported_src_type] = self.commands[src_type][command_name] + else: + command_to_types[command_name][src_type] = self.commands[src_type][command_name] + + # Prefix command name with names of supported source types + command_to_types_renamed = {} + for key, value in command_to_types.items(): + prefix = _join_with_conjunction( + [t.name.upper() for t in value.keys()], separator="|", conjunction="") + command_to_types_renamed[f"{prefix} {key}"] = value + + return command_to_types_renamed + file_extensions_str = self.get_source_file_extension_str() if not source: param_decls = [_source_arg_name] param = click.core.Argument(param_decls=param_decls) - raise click.exceptions.MissingParameter(None, ctx, param=param) + raise click.exceptions.MissingParameter( + None, click_ctx, param=param) try: - return self.commands[io_type(source)].keys() + return {**self.commands[io_type(source)], **self.commands["ANY"]} except ValueError as e: # noqa: F841 - click.echo(ctx.get_usage()) + click.echo(click_ctx.get_usage()) raise click.exceptions.UsageError("Source type expected to be a sensor hostname, " f"ip address, or a(n) {file_extensions_str} file. " "For a sensor source, please check that you can " @@ -456,7 +233,7 @@ def list_commands(self, ctx): "source, please check that the file path you have " "provided exists.") except KeyError as e: # noqa: F841 - click.echo(ctx.get_usage()) + click.echo(click_ctx.get_usage()) raise click.exceptions.UsageError("Source type expected to be a sensor hostname, " f"ip address, or a(n) {file_extensions_str} file. " "For a sensor source, please check that you can " @@ -464,36 +241,239 @@ def list_commands(self, ctx): "source, please check that the file path you have " "provided exists.") - def get_command(self, ctx, name): + def get_command(self, click_ctx: click.core.Context, name: str): """Get the click.Command object for the given command name""" - source = ctx.params.get(_source_arg_name) - ctx.ensure_object(dict) + source = click_ctx.params.get(_source_arg_name) + click_ctx.ensure_object(SourceCommandContext) + ctx: SourceCommandContext = click_ctx.obj # add source to context so the command can access it - ctx.obj[_source_arg_name] = source - - list_commands = self.list_commands(ctx) - if name in list_commands: - if source: - return self.commands[io_type(source)][name] + ctx.source_uri = source + + command_list = self.list_commands(click_ctx) + if name in command_list: + if not source: + # If called by --help (without source), return the first implementation of a command + # NOTE: This results in help printing the docstring of only the first implementation + return command_list[name][list(command_list[name].keys())[0]] else: - return list_commands[name] - else: - return None + ctx.invoked_command_names.append(name) + return command_list[name] + return None - def invoke(self, ctx): + def invoke(self, click_ctx: click.core.Context): """Called when the source command is invoked. If called without any args, prints the help. 
Otherwise, the superclass method is called.""" - if not ctx.protected_args: - print(self.get_help(ctx)) + if not click_ctx.protected_args: + print(self.get_help(click_ctx)) return - super().invoke(ctx) + super().invoke(click_ctx) -@cli.group(cls=SourceMultiCommand) -@click.argument(_source_arg_name, required=True, type=click.Path()) -def source(source): +@cli.group(cls=SourceMultiCommand, chain=True) +@click.argument(_source_arg_name, required=True) +@click.option('-m', '--meta', required=False, type=click_ro_file, multiple=True, + help="Metadata for PCAP, helpful if automatic metadata resolution fails") +@click.option('-l', '--lidar-port', default=None, type=int, help="Dest. port of lidar data") +@click.option('-i', '--imu-port', default=None, type=int, help="Dest. port of imu data") +@click.option('-x', '--do-not-reinitialize', is_flag=True, default=False, + help="Do not reinitialize (by default it will reinitialize if needed)") +@click.option('-y', '--no-auto-udp-dest', is_flag=True, default=False, + help="Do not automatically set udp_dest (by default it will auto set udp_dest") +@click.option('-s', '--soft-id-check', is_flag=True, + help="Continue parsing lidar packets even if init_id/sn doesn't match with metadata") # noqa +@click.option('-t', '--timeout', default=1.0, help="Seconds to wait for data") +@click.option('-F', '--filter', is_flag=True, help="Drop scans missing data") +@click.option('-e', '--extrinsics', type=float, required=False, nargs=16, + help="Lidar sensor extrinsics to use in viz (instead possible" + " extrinsics stored in OSF). If more than one sensor is" + " stored in the osf file and this argument is used then" + " the same extrinsics will be applied to all sensors") +@click.option("--extrinsics-file", + type=click.Path(exists=True, dir_okay=False), + required=False, + help="Path to a file containing extrinscs. The parameter would be" + " superseeded by the --extrinscs parameter if both supplied") +def source(source, meta: Tuple[str, ...], lidar_port: int, imu_port: int, extrinsics: Optional[List[float]], + extrinsics_file: Optional[str], do_not_reinitialize: bool, no_auto_udp_dest: bool, + soft_id_check: bool, timeout: int, filter: bool): """Run a command with the specified source (SENSOR, PCAP, or OSF) as SOURCE. For example, a sensor source: ouster-cli source os1-992xxx.local viz """ pass + + +@source.result_callback() +@click.pass_context +def process_commands(click_ctx: click.core.Context, callbacks: Iterable[SourceCommandCallback], + source: str, meta: Tuple[str, ...], lidar_port: int, imu_port: int, + extrinsics: Optional[List[float]], extrinsics_file: Optional[str], + do_not_reinitialize: bool, no_auto_udp_dest: bool, soft_id_check: bool, + timeout: int, filter: bool) -> None: + """Process all commands in a SourceMultiCommand, using each command's callback""" + + callbacks = list(callbacks) + ctx: SourceCommandContext = click_ctx.obj + command_names = ctx.invoked_command_names + + # ---- Lint commands ---- + # Ensure that no commands are duplicated + names_duplicate_check = set() + for name in command_names: + if name in names_duplicate_check: + raise click.exceptions.UsageError(f"'{name}' is duplicated in the multi-command chain. " + "Please invoke it only once. 
") + names_duplicate_check.add(name) + + # Ensure that no other commands are present, if a MULTICOMMAND_UNSUPPORTED + # is present + multicommand = True + for idx, c in enumerate(callbacks): + if c.type is SourceCommandType.MULTICOMMAND_UNSUPPORTED: + multicommand = False + if len(callbacks) != 1: + raise click.exceptions.UsageError(f"'{command_names[idx]}' does not support multi-command chaining. " + "Please invoke it without other commands. ") + + # Ensure that a consumer is always last + last_consumer_name, last_consumer_idx = None, None + last_processor_name, last_processor_idx = None, None + for idx, c in enumerate(callbacks): + if c.type is SourceCommandType.PROCESSOR: + last_processor_idx = idx + last_processor_name = command_names[idx] + elif c.type is SourceCommandType.CONSUMER: + last_consumer_idx = idx + last_consumer_name = command_names[idx] + + if multicommand: + if last_consumer_idx is None: + raise click.exceptions.UsageError("Must have a consumer such as 'viz' or 'save'.") + if (last_processor_idx is not None) and (last_processor_idx > last_consumer_idx): + raise click.exceptions.UsageError(f"'{last_processor_name}' must be invoked before " + f"'{last_consumer_name}'. Please reorder the multi-command chain. ") + + if not multicommand: + # Execute single non-multicommand command + c = callbacks[0] + c.callback_fn(ctx) + else: + # Execute multicommands + # Open source + + resolved_extrinsics: Optional[Union[str, np.ndarray]] = None + + if extrinsics_file: + resolved_extrinsics = extrinsics_file + if extrinsics: + resolved_extrinsics = np.array(extrinsics).reshape((4, 4)) + + ctx.scan_source = open_source(source, sensor_idx=0, + extrinsics=resolved_extrinsics, + meta=meta, + lidar_port=lidar_port, imu_port=imu_port, + do_not_reinitialize=do_not_reinitialize, + no_auto_udp_dest=no_auto_udp_dest, + soft_id_check=soft_id_check, + timeout=timeout, complete=filter, + buf_size=512) + # [kk] NOTE: buf_size is in terms of lidar_packets. In 4096x5 mode + # with 16 columns per packet, 512 packets are needed to fully + # buffer two frames + + if ctx.scan_source.is_indexed and len(ctx.scan_source) == 0: + print("WARNING: Source contains no scans.") + + ctx.scan_iter = iter(ctx.scan_source) + + # print any timeout exceptions we get + scans = ctx.scan_iter + + def catch_iter(): + try: + for scan in scans: + yield scan + except ClientTimeout as ex: + print(f"Error: {ex}") + except FileExistsError as ex: + print(f"Error: {ex}. Add --overwrite flag to overwrite and continue anyways.") + return + ctx.scan_iter = catch_iter() + try: + # Execute multicommand callbacks + + # Dataflow between callbacks occur entirely through the scan iterator. Callbacks may additionally + # register functions in ctx.thread_fns to be run in individual threads + # Callback invariants: + # 1. Must leave ctx.scan_iter in a consumable state after invocation. + # If a callback consumes the original ctx.scan_iter, it must set ctx.scan_iter + # to an unconsumed iterator (via CoupledTee or a new iterator) + # 2. May add a Callable[None -> None] to ctx.thread_fns. These will be executed in + # individual threads. + # 3. A single callback may also register a function to be run from the main thread in + # ctx.main_thread_fn. This is required to support viz on macOS, where openGL + # applications must be run from the main thread + # 4. Any registered thread_fn must support termination of execution via + # ctx.terminate_evt.is_set() + # 5. 
Every registered thread_fn must request the ouster-cli process terminate by calling + # ctx.terminate_evt.set() before terminating + + # Most callbacks will do one of the following: + # 1. execute immediately, and take no further action (ie, config) + # 2. map a processing function onto scans, such that processing is implicitly called while iterating + # 3. set ctx.scan_iter to a new iterator, and register a processing thread in ctx.thread_fns + # 4. create a CoupledTee from ctx.scan_iter, and re-set ctx.scan_iter to one of the resultant tees + + ctx.thread_fns = [] + ctx.main_thread_fn = None + ctx.terminate_evt = threading.Event() + for c in callbacks: + c.callback_fn(ctx) + + # Create threads from functions registered by the callbacks + threads = [] + for thread_fn in ctx.thread_fns: + threads.append(threading.Thread(target=thread_fn)) + + # Define a function to consume ctx.scan_iter + def pipeline_flush(): + try: + for _ in ctx.scan_iter: + pass + except Exception as ex: + # Terminate everything if we get an unhandled exception + ctx.terminate_evt.set() + raise ex + + threads.append(threading.Thread(target=pipeline_flush)) + + # Start all threads + for thread in threads: + thread.start() + + # Execute main thread fn, if set + if ctx.main_thread_fn is not None: + ctx.main_thread_fn() + ctx.terminate_evt.set() + + # Wait for threads to terminate + for thread in threads: + thread.join() + + true_source = ctx.scan_source._scan_source + try: + if true_source._source._id_error_count > 0: + print(f"WARNING: {true_source._source._id_error_count} lidar_packets with " + "mismatched init_id/sn were detected.") + if not soft_id_check: + print("NOTE: To disable strict init_id/sn checking use " + "--soft-id-check option (may lead to parsing " + "errors)") + except AttributeError: + pass # This source doesnt support _id_error_count + finally: + # Attempt to close scansource + try: + ctx.scan_source.close() + except: # noqa: E722 + pass diff --git a/python/src/ouster/cli/plugins/source_osf.py b/python/src/ouster/cli/plugins/source_osf.py index a2ab9b44..47c50138 100644 --- a/python/src/ouster/cli/plugins/source_osf.py +++ b/python/src/ouster/cli/plugins/source_osf.py @@ -1,151 +1,337 @@ -# type: ignore - import click -import ouster.cli.core.osf as osf_cli -from .io_type import OusterIoType -from .source import source, _source_arg_name, _output_file_arg_name, SourceConvertCommand +from typing import Any, Iterator, Dict, cast -# TODO[pb]: To be added later. 
-@click.command
-@click.argument(_output_file_arg_name, required=True)
-@click.option('-m',
-              '--meta',
-              required=False,
-              type=click.Path(exists=True, dir_okay=False, readable=True),
-              help="Metadata for PCAP, should automatic metadata resolution fail")
-@click.option('-s', '--chunk-size', default=0, help="Chunk size in bytes")
-@click.option('-f',
-              '--flags',
-              is_flag=True,
-              help='Add FLAGS/FLAGS2 to LidarScans')
-@click.option('--raw-headers',
-              is_flag=True,
-              help='Add RAW_HEADERS to LidarScans')
-@click.option('--raw-fields',
-              is_flag=True,
-              help='Add RAW32_WORDs to LidarScans')
-@click.option('--extrinsics',
-              type=float,
-              required=False,
-              nargs=16,
-              help='Lidar sensor extrinsics to use (single sensor data only)')
-@click.option('--multi',
-              is_flag=True,
-              help='Turn on multi sensor pcap handling and metadata resolutions')
-@click.option('--soft-id-check',
-              is_flag=True,
-              help="Continue parsing lidar packets even if init_id/sn doesn't "
-              "match with metadata")
+from .source_util import (SourceCommandContext,
+                          SourceCommandType,
+                          source_multicommand)
+
+
+@click.group(name="osf", hidden=True)
 @click.pass_context
-def osf_from_pcap(ctx, *args, **kwargs) -> None:
-    """Convert the source PCAP to OSF"""
-    # Implements ouster-cli source .pcap convert .osf
-    pass
-    # source = ctx.obj.get(_source_arg_name)
-    # osf_cli.osf_from_pcap_impl(
-    #     source, meta, output_file,
-    #     chunk_size, flags, raw_headers,
-    #     raw_fields, extrinsics,
-    #     multi, soft_id_check)
+def osf_group(ctx) -> None:
+    """Commands for working with OSF files and converting data to OSF."""
+    try:
+        from ouster.sdk.osf import _osf
+    except ImportError as e:
+        raise click.ClickException("Error: " + str(e))
+    ctx.ensure_object(dict)
+    sdk_log_level = ctx.obj.get('SDK_LOG_LEVEL', None)
+    if sdk_log_level:
+        _osf.init_logger(sdk_log_level)
 
 
 @click.command
 @click.option('-s', '--short', is_flag=True, help='Print less metadata info')
 @click.pass_context
-def osf_info(ctx, *args, **kwargs) -> None:
-    """Display metadata from the source OSF"""  # Implements ouster-cli source .osf info
-    source = ctx.obj.get(_source_arg_name)
-    kwargs['file'] = source
-    # TODO refactor
-    ctx.forward(osf_cli.osf_info, *args, **kwargs)
+@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED,
+                     retrieve_click_context=True)
+def osf_dump(ctx: SourceCommandContext, click_ctx: click.core.Context, short: bool) -> None:
+    """Print metadata information from an OSF file to stdout.
+
+    Parses all metadata entries, output is in JSON format. 
+ """ + file = ctx.source_uri or "" + try: + from ouster.sdk.osf import _osf + except ImportError as e: + raise click.ClickException("Error: " + str(e)) + + if not click_ctx.obj.get('SDK_LOG_LEVEL', None): + # If not SDK_LOG_LEVEL passed we set to "error" logging so to ensure + # that json output is not interferred with other SDK logging messages + # and thus ruining valid json structure + _osf.init_logger("error") + + print(_osf.dump_metadata(file, not short)) @click.command -@click.option('-e', - '--on-eof', - default='loop', - type=click.Choice(['loop', 'stop', 'exit']), - help="Loop, stop, or exit after reaching end of file") -@click.option("-p", "--pause", is_flag=True, help="Pause at first lidar scan") -@click.option("--pause-at", - default=-1, - help="Lidar Scan number to pause") -@click.option("-r", "--rate", default=1.0, help="Playback rate") -@click.option("--extrinsics", - type=float, - required=False, - nargs=16, - help="Lidar sensor extrinsics to use in viz (instead possible " - " extrinsics stored in OSF)") -@click.option("--skip-extrinsics", - is_flag=True, - help="Don't use any extrinsics (leaves them at Identity)") -@click.option("-s", - "--start-ts", - type=int, - required=False, - default=0, - help="Viz from the provided start timestamp (nanosecs)") -@click.option("--sensor-id", +@click.option('-n', type=int, - required=False, default=0, - help="Viz only the single sensor by sensor_id") -@click.option("--multi", - is_flag=True, - help="Use multi sensor viz") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", + help="Index of lidar") +@click.pass_context +@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context=True) +def osf_metadata(ctx: SourceCommandContext, click_ctx: click.core.Context, n: int) -> None: + """ + Display sensor metadata about the SOURCE. + """ + file = ctx.source_uri or "" + try: + import ouster.sdk.osf as osf + except ImportError as e: + raise click.ClickException("Error: " + str(e)) + + reader = osf.Reader(file) + msensors = reader.meta_store.find(osf.LidarSensor) + index = 0 + for sensor_id, sensor_meta in msensors.items(): + if index == n: + print(sensor_meta.info.original_string()) + return + index = index + 1 + + raise click.ClickException(f"Sensor Index {n} Not Found") + + +@click.command +@click.option('-v', + '--verbose', is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") + help="Print additional information about the file") @click.pass_context -def osf_viz(ctx, *args, **kwargs) -> None: - """Visualize Lidar Scan Data from an OSF file.""" - source = ctx.obj.get(_source_arg_name) - kwargs['file'] = source - # TODO refactor - ctx.forward(osf_cli.osf_viz, *args, **kwargs) +@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context=True) +def osf_info(ctx: SourceCommandContext, click_ctx: click.core.Context, + verbose: bool) -> None: + """ + Read an OSF file and print messages type, timestamp and counts to stdout. + Useful to check chunks layout and decoding of all known messages (-d option). 
+ """ + file = ctx.source_uri or "" + try: + import ouster.sdk.osf as osf + except ImportError as e: + raise click.ClickException("Error: " + str(e)) + + from ouster.sdk.client._client import get_field_types + from ouster.sdk.osf._osf import LidarScanStream + import os + + reader = osf.Reader(file) + + orig_layout = "Streaming" if reader.has_stream_info else "Standard" + if orig_layout == "Streaming" and reader.has_message_idx: + orig_layout = "Streaming, Indexed" + # count of messages in each stream + lidar_streams: Dict[str, Dict[str, Any]] + lidar_streams = {} + other_streams: Dict[str, Dict[str, Any]] + other_streams = {} -# TODO SW-4407: various OSF convert stories -class OsfConvertCommand(SourceConvertCommand): - """Implements - ouster-cli source .osf convert + start = 0 + end = 0 + count = 0 + size = os.path.getsize(file) - This method delegates to the appropriate command depending on the file - extension of the output file argument. + sensors = {} + msensors = reader.meta_store.find(osf.LidarSensor) + for sensor_id, sensor_meta in msensors.items(): + info = sensor_meta.info + sensors[sensor_id] = info + + messages = [it for it in reader.messages()] + for msg in messages: + count = count + 1 + if start == 0: + start = msg.ts + else: + start = min(msg.ts, start) + end = max(msg.ts, end) + obj: Dict[str, Any] + if not msg.of(LidarScanStream): + if msg.id not in other_streams: + obj = {} + obj["count"] = 1 + obj["start"] = obj["end"] = msg.ts + obj["type"] = reader.meta_store[msg.id].type + other_streams[msg.id] = obj + else: + obj = other_streams[msg.id] + obj["count"] = obj["count"] + 1 + obj["end"] = max(obj["end"], msg.ts) + else: + ls = msg.decode() + if ls: + if msg.id not in lidar_streams: + # get sensor id + obj = {} + obj["count"] = 1 + obj["start"] = msg.ts + obj["end"] = msg.ts + obj["type"] = reader.meta_store[msg.id].type + obj["fields"] = get_field_types(ls) + obj["sensor"] = sensors[reader.meta_store[msg.id].sensor_meta_id] + lidar_streams[msg.id] = obj + else: + obj = lidar_streams[msg.id] + obj["count"] = obj["count"] + 1 + obj["end"] = max(obj["end"], msg.ts) + if get_field_types(ls) != obj["fields"]: + print("WARNING: fields not equal!") + obj["fields"] = None + + print(f"Filename: {file}\nLayout: {orig_layout}") + print(f"Metadata ID: '{reader.metadata_id}'") + print(f"Size: {size/1000000} MB") + print(f"Start: {start/1000000000.0}") + print(f"End: {end/1000000000.0}") + print(f"Duration: {(end-start)/1000000000.0}") + print(f"Messages: {count}\n") + + # print out info about each stream + for k in lidar_streams: + stream = lidar_streams[k] + count = stream["count"] + start = stream['start'] / 1000000000.0 + end = stream['end'] / 1000000000.0 + sensor = stream["sensor"] + print(f"Stream {k} {stream['type']}: ") + print(f" Scan Count: {count}") + print(f" Start: {start}") + print(f" End: {end}") + print(f" Duration: {end-start} seconds") + print(f" Rate: {count/(end-start)} Hz") + print(f" Product Line: {sensor.prod_line}") + print(f" Sensor Mode: {sensor.mode}") + if verbose: + print(f" Sensor SN: {sensor.sn}") + print(f" Sensor FW Rev: {sensor.fw_rev}") + print(" Fields:") + if stream["fields"] is None: + print(" NO CONSISTENT FIELD TYPE") + else: + for f in stream["fields"]: + print(f" {f}:{stream['fields'][f]}") + + for k in other_streams: + stream = other_streams[k] + count = stream["count"] + start = stream['start'] / 1000000000.0 + end = stream['end'] / 1000000000.0 + print(f"Stream {k} {stream['type']}: ") + print(f" Message Count: {count}") + print(f" Start: 
{start}") + print(f" End: {end}") + print(f" Duration: {end-start} seconds") + print(f" Rate: {count/(end-start)} Hz") + + +@click.command +@click.option('-d', '--decode', is_flag=True, help="Decode messages") +@click.option('-v', + '--verbose', + is_flag=True, + help="Verbose LidarScan outputs (only when used with -d option)") +@click.option('-r', + '--check-raw-headers', + is_flag=True, + help="Check RAW_HEADERS fields by reconstructing lidar_packets" + " and batching LidarScan back (without fields data) and compare." + "(applies only when used with -d option)") +@click.option('-s', + '--standard', + is_flag=True, + help="Show standard layout with chunks") +@click.pass_context +@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context=True) +def osf_parse(ctx: SourceCommandContext, click_ctx: click.core.Context, + decode: bool, verbose: bool, check_raw_headers: bool, + standard: bool) -> None: + """ + Read an OSF file and print messages type, timestamp and counts to stdout. + Useful to check chunks layout and decoding of all known messages (-d option). """ - conversions = { - } - - -# add OSF commands to `source` command -source.commands[OusterIoType.OSF] = { - # TODO 4407 various stories for OSF conversion - 'convert': OsfConvertCommand( - 'convert', - context_settings=dict(ignore_unknown_options=True, allow_extra_args=True), - help="Save point cloud from an OSF file into specific formats" - ), - 'info': osf_info, - 'viz': osf_viz, -} - - -# add conversions to OSF from other formats -# TODO[pb]: To be added later -source.commands[OusterIoType.PCAP]['convert'].conversions[OusterIoType.OSF] = osf_from_pcap + file = ctx.source_uri or "" + try: + from ouster.sdk import client + import ouster.sdk.osf as osf + except ImportError as e: + raise click.ClickException("Error: " + str(e)) + + # NOTE[pb]: Mypy quirks or some of our Python packages structure quirks, idk :( + from ouster.sdk.client._client import get_field_types + from ouster.sdk.util.parsing import scan_to_packets, packets_to_scan, cut_raw32_words # type: ignore + + reader = osf.Reader(file) + + orig_layout = "STREAMING" if reader.has_stream_info else "STANDARD" + + print(f"filename: {file}, layout: {orig_layout}") + + # map stream_id to metadata entry + scan_stream_sensor: Dict[int, osf.LidarSensor] + scan_stream_sensor = {} + for scan_stream_id, scan_stream_meta in reader.meta_store.find( + osf.LidarScanStream).items(): + scan_stream_sensor[scan_stream_id] = reader.meta_store[ + scan_stream_meta.sensor_meta_id] + + ls_cnt = 0 + other_cnt = 0 + + def proc_msgs(msgs: Iterator[osf.MessageRef]): + nonlocal ls_cnt, other_cnt, decode + for m in msgs: + if m.of(osf.LidarScanStream): + prefix = "Ls" + ls_cnt += 1 + else: + prefix = "UN" + other_cnt += 1 + d = "" + verbose_str = "" + if decode: + obj = m.decode() + d = "[D]" if obj else "" + if m.of(osf.LidarScanStream): + ls = cast(client.LidarScan, obj) + + d = d + \ + (" [poses: YES]" if client.poses_present(ls) else "") + + if verbose: + verbose_str += f"{ls}" + + if check_raw_headers: + d = d + " " if d else "" + if client.ChanField.RAW_HEADERS in ls.fields: + sinfo = scan_stream_sensor[m.id].info + + # roundtrip: LidarScan -> packets -> LidarScan + packets = scan_to_packets(ls, sinfo) + + # recovered lidar scan + field_types = get_field_types(ls) + ls_rec = packets_to_scan( + packets, sinfo, fields=field_types) + + ls_no_raw32 = cut_raw32_words(ls) + ls_rec_no_raw32 = cut_raw32_words(ls_rec) + + assert ls_rec_no_raw32 == ls_no_raw32, "LidarScan 
should be" \ + " equal when recontructed from RAW_HEADERS fields" \ + " packets back" + + d += "[RAW_HEADERS: OK]" + else: + d += "[RAW_HEADERS: NONE]" + + print(f" {prefix}\tts: {m.ts}\t\tstream_id: {m.id}\t{d}") + if verbose_str: + print(60 * '-') + print(f"{verbose_str}") + print(60 * '-') + + if not standard and reader.has_stream_info: + proc_layout = "STREAMING" + proc_msgs(reader.messages()) + else: + proc_layout = "STANDARD" + for chunk in reader.chunks(): + print(f"Chunk [{chunk.offset}\t\t]: start_ts = {chunk.start_ts}, " + f"end_ts = {chunk.end_ts}") + proc_msgs(iter(chunk)) + + showed_as_str = "" + if orig_layout != proc_layout: + showed_as_str = f"showed as: {proc_layout}" + + print() + print(f"SUMMARY: [layout: {orig_layout}] {showed_as_str}") + print(f" lidar_scan (Ls) count = {ls_cnt}") + print(f" other count = {other_cnt}") diff --git a/python/src/ouster/cli/plugins/source_pcap.py b/python/src/ouster/cli/plugins/source_pcap.py new file mode 100644 index 00000000..f1367f58 --- /dev/null +++ b/python/src/ouster/cli/plugins/source_pcap.py @@ -0,0 +1,92 @@ +# type: ignore +from datetime import datetime +import os + +import click +from prettytable import PrettyTable, PLAIN_COLUMNS # type: ignore +from textwrap import indent + +from .source_util import (SourceCommandContext, + SourceCommandType, + source_multicommand) + + +@click.group(name="pcap", hidden=False) +def pcap_group() -> None: + """Commands for working with pcap files.""" + pass + + +def print_stream_table(all_infos): + # format output + table = PrettyTable() + table.field_names = [ + '', 'Src IP', 'Dst IP', 'Src Port', 'Dst Port', 'AF', 'Frag', 'Size', + 'Count' + ] + + def stream_sort(k): + return (list(k)[0].dst_ip, list(k)[0].src_ip, list(k)[0].dst_port) + + for k, v in sorted(all_infos.udp_streams.items(), key=stream_sort): + frag = 'No' if (len(v.fragment_counts) == 1) and (1 in v.fragment_counts) else 'Yes' + + first = True + af_count = len(v.payload_size_counts.items()) + for af_key, af_value in v.ip_version_counts.items(): + size_count = len(v.payload_size_counts.items()) + for size_key, size_value in v.payload_size_counts.items(): + cont = "" + + if (size_count > 1 or af_count > 1): + cont = 'X' if first else '↳' + + table.add_row([ + cont, k.src_ip, k.dst_ip, k.src_port, k.dst_port, af_key, frag, size_key, size_value + ]) + first = False + table.set_style(PLAIN_COLUMNS) + table.align = 'r' + table.align['Src IP'] = 'l' # type: ignore + click.echo(click.style(indent(str(table), ' '), fg='yellow')) + + +@click.command +@click.option('-n', type=int, default=-1, help="Read only INTEGER packets.") +@click.pass_context +@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context=True) +def pcap_info(ctx: SourceCommandContext, click_ctx: click.core.Context, n: int) -> None: + """Print information about a pcap file to stdout.""" + file = ctx.source_uri + try: + import ouster.sdk.pcap as pcap + except ImportError: + raise click.ClickException("Please verify that libpcap is installed") + + pcap_size = os.path.getsize(file) + # read full pcap with progress bar + all_infos = None + with click.progressbar(length=pcap_size, label="Reading pcap:") as bar: + def progress_callback(current, diff, total): + bar.update(diff) + all_infos = pcap._packet_info_stream(file, n, progress_callback, 100) + bar.update(pcap_size) + + encap = { + 0: '', + 1: 'ETHERNET', + 42: 'SLL' + }.get(all_infos.encapsulation_protocol, 'UNKNOWN') + min_datetime = datetime.fromtimestamp(all_infos.timestamp_min) + 
max_datetime = datetime.fromtimestamp(all_infos.timestamp_max) + duration = max_datetime - min_datetime + + click.echo(f"File size: {pcap_size / (2**20):.2f}M") + click.echo(f"Packets read: {all_infos.total_packets}") + click.echo(f"Encapsulation: {encap}") + click.echo(f"Capture start: {min_datetime}") + click.echo(f"Capture end: {max_datetime}") + click.echo(f"Duration: {duration}") + click.echo("UDP Streams:") + print_stream_table(all_infos) diff --git a/python/src/ouster/cli/plugins/source_save.py b/python/src/ouster/cli/plugins/source_save.py new file mode 100644 index 00000000..46c84126 --- /dev/null +++ b/python/src/ouster/cli/plugins/source_save.py @@ -0,0 +1,599 @@ +from ouster.cli.core.cli_args import CliArgs +import atexit +import click +import os +import time +from datetime import datetime +from pathlib import Path +import numpy as np +from typing import (Tuple, List, Iterator, Union) +from ouster.cli.core import SourceArgsException # type: ignore[attr-defined] +from ouster.sdk.client import (first_valid_packet_ts, + first_valid_column_ts, + UDPProfileLidar, LidarScan, ChanField, XYZLut, + ScanSource, destagger, SensorInfo, + LidarPacket, ImuPacket, ScanSourceAdapter) +from ouster.sdk import osf +from ouster.sdk.io_type import (io_type_from_extension, + OusterIoType) +from ouster.sdk.pcap import BagRecordingPacketSource, RecordingPacketSource, PcapScanSource +from ouster.sdk.sensor import SensorScanSource +from ouster.sdk.util import scan_to_packets # type: ignore +from ouster.sdk.pcap.pcap import MTU_SIZE +import ouster.sdk.pcap._pcap as _pcap +from .source_util import (SourceCommandContext, + SourceCommandType, + source_multicommand, + _join_with_conjunction, + import_rosbag_modules) +from contextlib import closing + +_file_exists_error = lambda filename: (f"Error: File '{filename}' already exists. Add --overwrite " + "flag to overwrite and continue anyways.") + + +@click.command(context_settings=dict( + ignore_unknown_options=True, + allow_extra_args=True, +)) +@click.argument("filename", required=True) +@click.option('-p', '--prefix', default="", help="Output prefix.") +@click.option('-d', '--dir', default="", help="Output directory.") +@click.option('--chunk-size', default=0, help="Split output by size (MB)") +@click.option('--overwrite', is_flag=True, default=False, help="If true, overwrite existing files with the same name.") +@click.option('-r', '--raw', is_flag=True, default=False, help="Save in raw mode, " + "where LidarPackets and ImuPackets from compatible sources are saved directly. " + "This mode does not preserve LidarScan transformations performed by other commands " + "in a multi-command chain. 
This mode preserves LEGACY ImuPackets.") +@click.pass_context +@source_multicommand(type=SourceCommandType.CONSUMER) +def source_save_pcap(ctx: SourceCommandContext, prefix: str, dir: str, filename: str, + chunk_size: int, raw: bool, overwrite: bool, **kwargs) -> None: + """Save source as a PCAP""" + scan_source = ctx.scan_source + scans = ctx.scan_iter + info = scan_source.metadata # type: ignore + + # Automatic file naming + filename = determine_filename(filename=filename, info=info, extension=".pcap", prefix=prefix, dir=dir) + + create_directories_if_missing(filename) + + filename = filename[0:-5] # remove extension + + if os.path.isfile(f'{filename}.json') and not overwrite: + click.echo(_file_exists_error(f'{filename}.json')) + exit(1) + + # Save metadata as json + with open(f"{filename}.json", 'w') as f: + f.write(info.updated_metadata_string()) + + if raw: + scan_source = None + if isinstance(ctx.scan_source, ScanSourceAdapter): + if isinstance(ctx.scan_source._scan_source, SensorScanSource): + scan_source = ctx.scan_source._scan_source._scans # type: ignore + elif isinstance(ctx.scan_source._scan_source, PcapScanSource): + scan_source = ctx.scan_source._scan_source # type: ignore + + if scan_source is None: + # [kk] TODO: Only single-source via ScanSourceAdapter is currently supported. + # Revisit when implmenting multi source in CLI + raise click.exceptions.BadParameter("Saving in -r/--raw mode is not supported with " + "the current source type.") + + if len(ctx.invoked_command_names) != 1: + click.echo("Warning: Saving pcap in -r/--raw mode will drop any LidarScan " + "transformations perfomed by other commands in this multi-command chain: " + f"{', '.join([c for c in ctx.invoked_command_names if c != 'save'])}.") + + # replace ScanSource's packetsource with RecordingPacketSource + scan_source._source = RecordingPacketSource( + scan_source._source, n_frames=None, + prefix_path=filename, chunk_size=chunk_size, overwrite=overwrite, + lidar_port=info.udp_port_lidar, imu_port=info.udp_port_imu + ) + else: + click.echo("Warning: Saving pcap without -r/--raw will not save LEGACY IMU packets.") + + if os.path.isfile(f'{filename}.pcap') and not overwrite: + click.echo(_file_exists_error(f'{filename}.pcap')) + exit(1) + + # Initialize pcap writer + pcap_record_handle = _pcap.record_initialize(f"{filename}.pcap", MTU_SIZE, False) + + def save_iter(): + try: + for scan in scans: + # [kk] TODO: implement chunk-size + packets = scan_to_packets(scan, info) + for packet in packets: + ts = packet.capture_timestamp or time.time() + _pcap.record_packet(pcap_record_handle, "127.0.0.1", + "127.0.0.1", info.udp_port_lidar, + info.udp_port_lidar, packet._data, ts) + yield scan + except (KeyboardInterrupt, StopIteration): + pass + finally: + # Finish pcap_recording when this generator is garbage collected + _pcap.record_uninitialize(pcap_record_handle) + click.echo(f"Saving PCAP file at {filename}.pcap") + ctx.scan_iter = save_iter() + + click.echo(f"Saving metadata json at {filename}.json") + + +@click.command(context_settings=dict( + ignore_unknown_options=True, + allow_extra_args=True, +)) +@click.argument("filename", required=True) +@click.option('-p', '--prefix', default="", help="Output prefix.") +@click.option('-d', '--dir', default="", help="Output directory.") +@click.option('-c', '--continue-anyways', is_flag=True, default=False, help="Continue saving " + "scans after an error is encountered, dropping bad data if necessary.") +@click.option('--overwrite', is_flag=True, default=False, help="If 
true, overwrite existing files with the same name.") +@click.option("--ts", default='packet', help="Timestamp to use for indexing.", type=click.Choice(['packet', 'lidar'])) +@click.pass_context +@source_multicommand(type=SourceCommandType.CONSUMER) +def source_save_osf(ctx: SourceCommandContext, prefix: str, dir: str, filename: str, + overwrite: bool, ts: str, continue_anyways: bool, **kwargs) -> None: + """Save source as an OSF""" + scans = ctx.scan_iter + info = ctx.scan_source.metadata # type: ignore + + # Automatic file naming + filename = determine_filename(filename=filename, info=info, extension=".osf", prefix=prefix, dir=dir) + + create_directories_if_missing(filename) + + click.echo(f"Saving OSF file at {filename}") + + if os.path.isfile(filename) and not overwrite: + click.echo(_file_exists_error(filename)) + exit(1) + + # Initialize osf writer + osf_writer = osf.Writer(filename, info) + + # TODO: extrinsics still need to be plugged in here -- Tim T. + wrote_scans = False + dropped_scans = 0 + ts_method = first_valid_packet_ts if ts == "packet" else first_valid_column_ts + last_ts = 0 + + # returns false if we should stop recording + def write_osf(scan: LidarScan): + nonlocal wrote_scans, last_ts, dropped_scans + # Set OSF timestamp to the timestamp of the first valid column + scan_ts = ts_method(scan) + if scan_ts: + if scan_ts < last_ts: + if continue_anyways: + dropped_scans = dropped_scans + 1 + return True + else: + print("WARNING: Stopped saving because scan timestamps jumped backwards which is " + "not supported by OSF. Try with `-c` to drop these scans and continue " + "anyways.") + osf_writer.close() + return False + wrote_scans = True + osf_writer.save(0, scan, scan_ts) + last_ts = scan_ts + else: + # by default fail out + if not continue_anyways: + osf_writer.close() + os.remove(filename) + print("ERROR: Cannot save scans because they are missing packet timestamps." + " Try with `--ts lidar` instead or `-c` to continue anyways.") + raise ValueError("Bad timestamps") + dropped_scans = dropped_scans + 1 + return True + + def save_iter(): + try: + with closing(osf_writer): + stop = False + for scan in scans: + # Drop invalid lidarscans + if not stop and np.any(scan.status): + if not write_osf(scan): + stop = True + yield scan + except (KeyboardInterrupt): + pass + except (ValueError): + ctx.terminate_evt.set() + + ctx.scan_iter = save_iter() + + def exit_print(): + if dropped_scans > 0: + if ts == "lidar": + click.echo(f"WARNING: Dropped {dropped_scans} scans because missing or decreasing timestamps.") + else: + click.echo(f"WARNING: Dropped {dropped_scans} scans because missing or decreasing " + "packet timestamps. 
Try with `--ts lidar` instead.") + if not wrote_scans: + click.echo("WARNING: No scans saved.") + atexit.register(exit_print) + + +@click.command(context_settings=dict( + ignore_unknown_options=True, + allow_extra_args=True, +)) +@click.argument("filename", required=True) +@click.option('-p', '--prefix', default="", help="Output prefix.") +@click.option('-d', '--dir', default="", help="Output directory.") +@click.option('--overwrite', is_flag=True, default=False, help="If true, overwrite existing files with the same name.") +@click.pass_context +@source_multicommand(type=SourceCommandType.CONSUMER) +def source_save_csv(ctx: SourceCommandContext, prefix: str, + dir: str, filename: str, overwrite: bool, **kwargs) -> None: + """Save source as one CSV file per LidarScan.""" + ctx.scan_iter = source_to_csv_iter(ctx.scan_iter, ctx.scan_source.metadata, # type: ignore + prefix=prefix, dir=dir, filename=filename, + overwrite=overwrite) + + +def source_to_csv_iter(scan_iter: Iterator[LidarScan], info: SensorInfo, + prefix: str = "", dir: str = "", overwrite: bool = True, + filename: str = "") -> Iterator[LidarScan]: + """Create a CSV saving iterator from a LidarScan iterator + + The number of saved lines per csv file is always H x W, which corresponds to + a full 2D image representation of a lidar scan. + + Each line in a csv file is (for DUAL profile): + + TIMESTAMP (ns), RANGE (mm), RANGE2 (mm), SIGNAL (photons), + SIGNAL2 (photons), REFLECTIVITY (%), REFLECTIVITY2 (%), + NEAR_IR (photons), X (m), Y (m), Z (m), X2 (m), Y2 (m), Z2(m), + MEASUREMENT_ID, ROW, COLUMN + """ + + dual = False + if info.format.udp_profile_lidar in [UDPProfileLidar.PROFILE_LIDAR_RNG19_RFL8_SIG16_NIR16_DUAL]: + dual = True + print("Note: You've selected to convert a dual returns pcap to CSV. Each row " + "will represent a single pixel, so that both returns for that pixel will " + "be on a single row. As this is an example we provide for getting " + "started, we realize that you may have conversion needs which are not met " + "by this function. 
You can find the source code on the Python SDK " + "documentation website to modify it for your own needs.") + + # Build filename + filename = determine_filename(filename=filename, info=info, extension=".csv", prefix=prefix, dir=dir) + + create_directories_if_missing(filename) + + filename = filename[0:-4] # remove extension + + click.echo(f"Saving CSV file at {filename}.csv") + + # Construct csv header and data format + def get_fields_info(scan: LidarScan) -> Tuple[str, List[str]]: + field_names = 'TIMESTAMP (ns), ROW, DESTAGGERED IMAGE COLUMN, MEASUREMENT_ID' + field_fmts = ['%d'] * 4 + for chan_field in scan.fields: + field_names += f', {chan_field}' + if chan_field in [ChanField.RANGE, ChanField.RANGE2]: + field_names += ' (mm)' + if chan_field in [ChanField.REFLECTIVITY, ChanField.REFLECTIVITY2]: + field_names += ' (%)' + if chan_field in [ChanField.SIGNAL, ChanField.SIGNAL2, + ChanField.NEAR_IR]: + field_names += ' (photons)' + field_fmts.append('%d') + field_names += ', X1 (m), Y1 (m), Z1 (m)' + field_fmts.extend(3 * ['%.4f']) + if dual: + field_names += ', X2 (m), Y2 (m), Z2 (m)' + field_fmts.extend(3 * ['%.4f']) + return field_names, field_fmts + + field_names: str = '' + field_fmts: List[str] = [] + + # [doc-stag-pcap-to-csv] + # {recompute xyzlut to save computation in a loop + xyzlut = XYZLut(info) + + row_layer = np.fromfunction(lambda i, j: i, + (info.format.pixels_per_column, + info.format.columns_per_frame), dtype=int) + column_layer = np.fromfunction(lambda i, j: j, + (info.format.pixels_per_column, + info.format.columns_per_frame), dtype=int) + column_layer_staggered = destagger(info, column_layer, + inverse=True) + + def save_iter(): + nonlocal field_names, field_fmts + try: + for idx, scan in enumerate(scan_iter): + + # Initialize the field names for csv header + if not field_names or not field_fmts: + field_names, field_fmts = get_fields_info(scan) + + # Copy per-column timestamps and measurement_ids for each beam + timestamps = np.tile(scan.timestamp, (scan.h, 1)) + measurement_ids = np.tile(scan.measurement_id, (scan.h, 1)) + + # Grab channel data + fields_values = [scan.field(ch) for ch in scan.fields] + + frame = np.dstack((timestamps, row_layer, column_layer_staggered, + measurement_ids, *fields_values)) + + # Output points in "image" vs. 
staggered order + frame = destagger(info, frame) + + # Destagger XYZ separately since it has a different type + xyz = xyzlut(scan.field(ChanField.RANGE)) + xyz_destaggered = destagger(info, xyz) + + if dual: + xyz2 = xyzlut(scan.field(ChanField.RANGE2)) + xyz2_destaggered = destagger(info, xyz2) + + # Get all data as one H x W x num fields int64 array for savetxt() + frame = np.dstack(tuple(map(lambda x: x.astype(object), + (frame, xyz_destaggered, xyz2_destaggered)))) + + else: + # Get all data as one H x W x num fields int64 array for savetxt() + frame = np.dstack(tuple(map(lambda x: x.astype(object), + (frame, xyz_destaggered)))) + + frame_colmajor = np.swapaxes(frame, 0, 1) + + # Write csv out to file + csv_path = f"{filename}_{idx}.csv" + print(f'write frame index #{idx}, to file: {csv_path}') + + if os.path.isfile(csv_path) and not overwrite: + print(_file_exists_error(csv_path)) + exit(1) + + header = '\n'.join([f'frame num: {idx}', field_names]) + + np.savetxt(csv_path, + frame_colmajor.reshape(-1, frame.shape[2]), + fmt=field_fmts, + delimiter=',', + header=header) + + yield scan + except (KeyboardInterrupt, StopIteration): + pass + + return save_iter() + + +# Determines the filename to use +def determine_filename(prefix: str, dir: str, filename: str, extension: str, info: SensorInfo): + outpath = Path.cwd() + if dir: + outpath = Path(dir) + + time_str = datetime.now().strftime("%Y%m%d_%H%M%S") + prefix = f"{prefix}_" if prefix else prefix + + if filename != "": + filename = str(outpath / f"{prefix}{filename}") + else: + filename = str(outpath / f"{prefix}{info.prod_line}_{info.fw_rev}_{info.mode}_{time_str}{extension}") + + return filename + + +# Creates path to file if any folders in the chain are missing +def create_directories_if_missing(filename: str): + outpath = Path(filename).parents[0] + if not outpath.is_dir(): + outpath.mkdir(parents=True) + + +@click.command(context_settings=dict( + ignore_unknown_options=True, + allow_extra_args=True, +)) +@click.argument("filename", required=True) +@click.option('-p', '--prefix', default="", help="Output prefix.") +@click.option('-d', '--dir', default="", help="Output directory.") +@click.option('--overwrite', is_flag=True, default=False, help="If true, overwrite existing files with the same name.") +@click.option('-r', '--raw', is_flag=True, default=False, help="Save in raw mode, " + "where LidarPackets and ImuPackets from compatible sources are saved directly. " + "This mode does not preserve LidarScan transformations performed by other commands " + "in a multi-command chain. 
This mode preserves LEGACY ImuPackets.") +@click.pass_context +@source_multicommand(type=SourceCommandType.CONSUMER) +def source_save_bag(ctx: SourceCommandContext, prefix: str, dir: str, filename: str, + raw: bool, overwrite: bool, **kwargs) -> None: + """Save source as a packet rosbag.""" + if raw: + _ = source_to_bag_iter(ctx.scan_source, ctx.scan_source.metadata, save_source_packets=True, # type: ignore + prefix=prefix, dir=dir, filename=filename, overwrite=overwrite) + else: + ctx.scan_iter = source_to_bag_iter(ctx.scan_iter, ctx.scan_source.metadata, # type: ignore + prefix=prefix, dir=dir, filename=filename, + overwrite=overwrite) + + +def source_to_bag_iter(scans: Union[ScanSource, Iterator[LidarScan]], info: SensorInfo, + sensor_idx: int = 0, save_source_packets: bool = False, prefix: str = "", + dir: str = "", filename: str = "", overwrite: bool = False) -> Iterator[LidarScan]: + """Create a ROSBAG saving iterator from a LidarScan iterator + + Requires the active ROS environment or ROS-less rospy/rosbag python + modules installed. See error message for details. + + If save_source_packets is selected, the raw packets from scans (which must be a compatible + PacketSource - PCAP, BAG, or Live Sensor) are saved directly. + + Otherwise, each LidarScan in scans is deparsed into UDP packets and saved. + """ + + try: + import ouster.sdk.pcap as pcap # noqa: F401 + except ImportError: + raise click.ClickException("Please verify that libpcap is installed") + + # Check that ROS imports are available + import_rosbag_modules(raise_on_fail=True) + from ouster.sdk.bag import PacketMsg # type: ignore + import rosbag # type: ignore + import rospy # type: ignore + + # Build filename + filename = determine_filename(filename=filename, info=info, extension=".bag", prefix=prefix, dir=dir) + + if os.path.isfile(filename) and not overwrite: + print(_file_exists_error(filename)) + exit(1) + + create_directories_if_missing(filename) + + lidar_topic = "/os_node/lidar_packets" + imu_topic = "/os_node/imu_packets" + + click.echo(f"Saving ROSBAG file at {filename}") + + if not save_source_packets: + def save_iter(): + try: + with rosbag.Bag(filename, 'w') as outbag: + for scan in scans: + packets = scan_to_packets(scan, info) + for packet in packets: + ts = rospy.Time.from_sec(packet.capture_timestamp) + msg = PacketMsg(buf=packet._data.tobytes()) + if isinstance(packet, LidarPacket): + outbag.write(lidar_topic, msg, ts) + elif isinstance(packet, ImuPacket): + outbag.write(imu_topic, msg, ts) + yield scan + except (KeyboardInterrupt, StopIteration): + pass + + return save_iter() + else: + scan_source = None + if isinstance(scans, ScanSourceAdapter): + if isinstance(scans._scan_source, SensorScanSource): + scan_source = scans._scan_source._scans # type: ignore + elif isinstance(scans._scan_source, PcapScanSource): + scan_source = scans._scan_source # type: ignore + + if scan_source is None: + # [kk] TODO: Only single-source via ScanSourceAdapter is currently supported. 
+ # Revisit when implmenting multi source in CLI + raise click.exceptions.BadParameter("Saving in -r/--raw mode is not supported with " + "the current source type.") + + # replace ScanSource's packetsource with BagRecordingPacketSource + scan_source._source = BagRecordingPacketSource( + scan_source._source, filename, lidar_topic=lidar_topic, imu_topic=imu_topic + ) + + return scans # type: ignore + + +class SourceSaveCommand(click.Command): + """Generalizes ouster-cli source <> save + """ + + # Map from output type to a save implementation function + implementations = { + OusterIoType.OSF: source_save_osf, + OusterIoType.PCAP: source_save_pcap, + OusterIoType.CSV: source_save_csv, + OusterIoType.BAG: source_save_bag, + } + + def __init__(self, *args, **kwargs): + kwargs['add_help_option'] = True + super().__init__(*args, **kwargs) + self.update_help() + self.update_params() + + def update_help(self): + help_str = "Save to an " + help_str += _join_with_conjunction([k.name.upper() for k in self.implementations.keys()]) + help_str += " with the given filename. If only an extension is provided, the file is named automatically." + self.help = help_str + + def update_params(self): + # Add click options/parameters from save implementation commands + param_mapping = {} + for (iotype, cmd) in self.implementations.items(): + for p in cmd.params: + if p.name in param_mapping.keys(): + param_mapping[p.name][1].append(iotype) + else: + param_mapping[p.name] = (p, [iotype]) + + # Prefix options/parameters with name of the output iotype + self.params = [] + for (_, (param, iotypes)) in param_mapping.items(): + if len(iotypes) < len(self.implementations): + help_prefix = "|".join([k.name.upper() for k in iotypes]) + help_prefix = f"[{help_prefix}]:" + # Click calls this init function multiple times on --help. + # Check that the help string has not already been prepended with param.help + if help_prefix not in param.help: + param.help = f"{help_prefix} {param.help}" + self.params.append(param) + + def get_help(self, *args, **kwargs): + # Update help text to capture changes from lazily loaded save implementations + self.update_help() + return super().get_help(*args, **kwargs) + + def get_params(self, *args, **kwargs): + # Update params to capture changes from lazily loaded save implementations + self.update_params() + return super().get_params(*args, **kwargs) + + def invoke(self, ctx, *args): + output_name = ctx.params.get('filename') + output_format = "" + + split = os.path.splitext(output_name) + if split[0][0] == '.' and split[1] == "": + output_format = split[0].replace(".", "") + ctx.params["filename"] = "" + elif split[1] == "": + click.echo("Error: Must provide a filename with an extension.") + exit(2) + else: + output_format = split[1].replace(".", "") + + # Ensure the file extension is present and a valid one + supported_formats = [iotype.name.upper() for iotype in self.implementations.keys()] + if output_format.upper() not in supported_formats: + string = f"Error: Invalid file extension. 
'.{output_format.lower()}' is not one of " + string += _join_with_conjunction([f".{x.lower()}" for x in supported_formats]) + + click.echo(string + ".") + exit(2) + + ctx.params["format"] = output_format + output_type = io_type_from_extension(f" .{output_format}") + convert_command = self.implementations[output_type] + if CliArgs().has_any_of(ctx.help_option_names): + click.echo(convert_command.get_help(ctx)) + else: + try: + return ctx.forward(convert_command) + except TypeError: + if len(ctx.args) > 0: + raise SourceArgsException(ctx) diff --git a/python/src/ouster/cli/plugins/source_sensor.py b/python/src/ouster/cli/plugins/source_sensor.py new file mode 100644 index 00000000..7edaf10b --- /dev/null +++ b/python/src/ouster/cli/plugins/source_sensor.py @@ -0,0 +1,106 @@ +# type: ignore +import json + +import click + +import ouster.sdk.client as client + +from .source_util import (SourceCommandContext, + SourceCommandType, + source_multicommand) + + +@click.group(name="sensor", hidden=True) +def sensor_group() -> None: + """Commands for working with sensors.""" + pass + + +@click.command +@click.option('--legacy/--non-legacy', + default=False, + help="Use legacy metadata format or not") +@click.pass_context +@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context=True) +def sensor_metadata(ctx: SourceCommandContext, click_ctx: click.core.Context, + legacy: bool) -> None: + """Display sensor metadata about the SOURCE.""" # Implements ouster-cli source metadata + try: + click.echo(client.Sensor(ctx.source_uri, 7502, 7503, + _legacy_format=legacy)._fetched_meta) + except RuntimeError as e: + raise click.ClickException(str(e)) + + +@click.command() +@click.argument('keyval', metavar='[KEY VAL]...', type=str, nargs=-1) +@click.option('-d', 'dump', is_flag=True, help='Dump current configuration') +@click.option('-c', 'file', type=click.File(), help='Read config from file') +@click.option('-u', 'auto', is_flag=True, help='Set automatic udp dest') +@click.option('-p', 'persist', is_flag=True, help='Persist configuration') +@click.option('-s/-n', 'standby', default=None, help='Set STANDBY or NORMAL') +@click.pass_context +@source_multicommand(type=SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context=True) +def sensor_config(ctx: SourceCommandContext, click_ctx: click.core.Context, +keyval, dump, file, auto, persist, standby) -> None: + """Manipulate the sensor configuration. + + Update the sensor configuration or dump it to stdout. The first positional + argument is the sensor hostname; remaining arguments are interpreted as + config parameter key/value pairs, for example: + + \b + $ ouster-cli sensor config os-99xxxxxxxxxx \\ + lidar_mode 2048x10 azimuth_window "[20000, 60000]" + + If no options or config param values are specified, use the default UDP + ports, automatic UDP destination, full azimuth azimuth window, and set the + operating mode to NORMAL. 
+ """ + hostname = ctx.source_uri + + def parse(s): + """Helper to read cli arg as json value with fallback to string.""" + try: + return json.loads(s) + except json.decoder.JSONDecodeError: + return json.loads(f'"{s}"') + + if dump: + if file or keyval or auto or persist or standby is not None: + raise click.ClickException("Cannot use other options with `-d` command") + cfg = client.get_config(hostname) + click.echo(cfg) + return + elif file: + if keyval: + raise click.ClickException("Cannot specify extra config keys with `-c`") + cfg = client.SensorConfig(file.read()) + click.echo("Setting config from file:") + elif not keyval and not auto and standby is None: + auto = True + cfg = client.SensorConfig() + cfg.udp_port_lidar = 7502 + cfg.udp_port_imu = 7503 + cfg.azimuth_window = (0, 360000) + cfg.signal_multiplier = 1 + cfg.operating_mode = client.OperatingMode.OPERATING_NORMAL + click.echo("No config specified; using defaults and auto UDP dest:") + else: + if len(keyval) % 2 != 0: + raise click.ClickException(f"Unmatched key/value arg: {keyval[-1]}") + d = dict(zip(keyval[::2], map(parse, keyval[1::2]))) + cfg = client.SensorConfig(json.dumps(d)) + click.echo("Updating configuration:") + + if standby is not None: + cfg.operating_mode = (client.OperatingMode.OPERATING_STANDBY if standby + else client.OperatingMode.OPERATING_NORMAL) + + click.echo(f"{cfg}") + try: + client.set_config(hostname, cfg, udp_dest_auto=auto, persist=persist) + except RuntimeError as e: + raise click.ClickException(str(e)) diff --git a/python/src/ouster/cli/plugins/source_util.py b/python/src/ouster/cli/plugins/source_util.py new file mode 100644 index 00000000..0d1d5901 --- /dev/null +++ b/python/src/ouster/cli/plugins/source_util.py @@ -0,0 +1,190 @@ +from enum import IntEnum +from functools import wraps +from ouster.sdk.client import LidarScan, ScanSource +from typing import (Callable, List, Any, Union, + Dict, Optional, Iterator) +from threading import Event +from dataclasses import dataclass +import queue +import click + + +class SourceCommandType(IntEnum): + MULTICOMMAND_UNSUPPORTED = 0 + PROCESSOR = 1 + CONSUMER = 2 + + +@dataclass(init=False) +class SourceCommandContext: + source_uri: Optional[str] + scan_source: Optional[Union[ScanSource, Any]] + scan_iter: Optional[Iterator[LidarScan]] + terminate_evt: Optional[Event] + main_thread_fn: Optional[Callable[[None], None]] + thread_fns: List[Callable[[None], None]] + invoked_command_names: List[str] + misc: Dict[Any, Any] + terminate_exception: Optional[Exception] + + def __init__(self) -> None: + self.source_uri = "" + self.scan_source = None + self.scan_iter = None + self.terminate_evt = None + self.main_thread_fn = None + self.thread_fns = [] + self.invoked_command_names = [] + self.misc = {} + self.terminate_exception = None + + # [kk] NOTE: get and __getitem__ are defined to support + # older code that still treats ctx.obj as a dict + # We should refactor those calls out, and remove these methods + def get(self, key: str, default: Any) -> Any: + return self.misc.get(key, default) + + def __getitem__(self, key: str) -> Any: + return self.misc[key] + + +@dataclass +class SourceCommandCallback: + callback_fn: Callable[[SourceCommandContext], None] + type: SourceCommandType + + +def source_multicommand(type: SourceCommandType = SourceCommandType.MULTICOMMAND_UNSUPPORTED, + retrieve_click_context: bool = False): + def source_multicommand_wrapper(fn): + @wraps(fn) + def callback_wrapped(click_ctx: click.core.Context, *args, **kwargs): + # Extract ctx.obj: 
SourceCommandContext from click context + if not retrieve_click_context: + return SourceCommandCallback(lambda ctx: fn(ctx, *args, **kwargs), type) # type: ignore + else: + return SourceCommandCallback( + lambda ctx: fn(ctx, click_ctx, *args, **kwargs), type) # type: ignore + return callback_wrapped + return source_multicommand_wrapper + + +class CoupledTee: + sentinel = object() + _queues: List[queue.Queue] + + def __init__(self, iter: Iterator, n: int = 2, + terminate: Optional[Event] = None, + copy_fn: Optional[Callable] = None, + poll_wait_sec: float = 0.25) -> None: + self._iter = iter + self._n = n + self._queues = [queue.Queue() for _ in range(n - 1)] + self._next = None + self._terminate = terminate + self._copy_fn = (copy_fn if copy_fn is not None else + lambda x: x) + self._poll_wait_sec = poll_wait_sec + + def main_tee(self) -> Iterator: + try: + for val in self._iter: + for q in self._queues: + q.put(val) + yield val + for q in self._queues: + with q.all_tasks_done: + while q.unfinished_tasks: + q.all_tasks_done.wait(self._poll_wait_sec) + if self._terminate and self._terminate.is_set(): + return + except Exception as ex: + for q in self._queues: + q.put(ex) + raise ex + + # propagate StopIteration to sub_tees + for q in self._queues: + q.put(StopIteration()) + + def sub_tee(self, idx: int) -> Iterator: + while True: + try: + val = self._queues[idx].get(block=True, timeout=0.5) + self._queues[idx].task_done() + if isinstance(val, Exception): + # python doesnt allow you to throw a stop iteration here + if isinstance(val, StopIteration): + return + raise val + yield val + except queue.Empty: + if self._terminate and self._terminate.is_set(): + return + + @staticmethod + def tee(iter: Iterator, n: int = 2, **kwargs) -> List[Iterator]: + ct = CoupledTee(iter, **kwargs) + tees = [] + tees.append(ct.main_tee()) + for i in range(n - 1): + tees.append(ct.sub_tee(i)) + return tees + + +def _join_with_conjunction(things_to_join: List[str], separator: str = ', ', conjunction: str = 'or') -> str: + """Given a list of things, return a string like + 'Thing A, Thing B, or Thing C' + """ + strings = [str(x) for x in things_to_join] + if conjunction and len(strings) > 1: + strings[-1] = conjunction + " " + strings[-1] + if len(strings) == 2: + if conjunction: + return ' '.join(strings) + else: + return separator.join(strings) + return separator.join(strings) + + +ROS_MODULES_ERROR_MSG = """ +Error: {err_msg} + +Please verify that ROS Python modules are available. 
+ +The best option is to try to install unofficial rospy packages that work +with Python 3.8 on Ubuntu 18.04/20.04 and Debian 10 without ROS: + + pip install --extra-index-url https://rospypi.github.io/simple/ rospy rosbag tf2_ros + +NOTE: If during the attempt to run the above command you get an error: + + EnvironmentError: 404 Client Error: Not Found for url: https://pypi.org/simple/rospy/ + +Please check installed `pip` version (20.0+ works well with extra indexes), and +if needed upgrade `pip` with (in a sourced venv): + + pip install pip -U + +Alternatively, the bagpy package might work on some systems: + + pip install bagpy + +Some users have even more packages missing so they may need to aditionally install: + + pip install PyYAML pycryptodome pycryptodomex + +""" + + +def import_rosbag_modules(raise_on_fail: bool = False) -> bool: + try: + import rosbag # type: ignore # noqa: F401 + import rospy # type: ignore # noqa: F401 + import genpy # type: ignore # noqa: F401 + except ImportError as err: + if raise_on_fail: + raise ModuleNotFoundError( + ROS_MODULES_ERROR_MSG.format(err_msg=str(err))) + return False + return True diff --git a/python/src/ouster/cli/plugins/testing.py b/python/src/ouster/cli/plugins/testing.py index ce04e06e..be945fa9 100644 --- a/python/src/ouster/cli/plugins/testing.py +++ b/python/src/ouster/cli/plugins/testing.py @@ -5,8 +5,8 @@ from typing import IO from os import path -import ouster.client as client -import ouster.client._digest as digest +import ouster.sdk.client as client +import ouster.sdk.client._digest as digest from ouster.sdk.util import resolve_metadata _click_ro_file = click.Path(exists=True, dir_okay=False, readable=True) @@ -31,7 +31,7 @@ def testing_time(pcap_file: str, json: IO) -> None: """ try: - import ouster.pcap as pcap + import ouster.sdk.pcap as pcap except ImportError: raise click.ClickException("Please verify that libpcap is installed") @@ -81,7 +81,7 @@ def compute_digest(file: str, meta: str, lidar_port: int, imu_port: int, packets: client.PacketSource if path.splitext(file)[1] == ".pcap": try: - from ouster import pcap + from ouster.sdk import pcap except ImportError: raise click.ClickException( "Please verify that libpcap is installed") diff --git a/python/src/ouster/client/__init__.py b/python/src/ouster/client/__init__.py index a7edbca5..f238a589 100644 --- a/python/src/ouster/client/__init__.py +++ b/python/src/ouster/client/__init__.py @@ -1,63 +1,5 @@ -""" -Copyright (c) 2021, Ouster, Inc. -All rights reserved. 
- -Python sensor client -""" # flake8: noqa (unused imports) -from ._client import SensorInfo -from ._client import DataFormat -from ._client import LidarMode -from ._client import TimestampMode -from ._client import OperatingMode -from ._client import MultipurposeIOMode -from ._client import Polarity -from ._client import NMEABaudRate -from ._client import ChanField -from ._client import UDPProfileLidar -from ._client import UDPProfileIMU -from ._client import SensorConfig -from ._client import SensorCalibration -from ._client import ShotLimitingStatus -from ._client import ThermalShutdownStatus -from ._client import init_logger -from ._client import convert_to_legacy -from ._client import get_config -from ._client import set_config -from ._client import LidarScan -from ._client import get_field_types -from ._client import _Packet -from ._client import _LidarPacket -from ._client import _ImuPacket - -from .data import BufferT -from .data import FieldDType -from .data import FieldTypes -from .data import Packet -from .data import ImuPacket -from .data import LidarPacket -from .data import LidarPacketValidator -from .data import ColHeader -from .data import XYZLut -from .data import destagger -from .data import PacketValidationFailure, PacketIdError, PacketSizeError -from .data import packet_ts - -from .core import ClientError -from .core import ClientTimeout -from .core import ClientOverflow -from .core import PacketSource -from .core import ScanSource -from .core import Packets -from .core import Sensor -from .core import Scans -from .core import first_valid_column -from .core import last_valid_column -from .core import first_valid_column_ts -from .core import first_valid_packet_ts -from .core import last_valid_column_ts -from .core import first_valid_column_pose -from .core import last_valid_column_pose -from .core import valid_packet_idxs -from .core import poses_present +print("warning: the ouster.client module has been moved to ouster.sdk.client, " + "please use the new path to avoid this warning.") +from ouster.sdk.client import * diff --git a/python/src/ouster/osf/__init__.py b/python/src/ouster/osf/__init__.py index c9795127..4032af47 100644 --- a/python/src/ouster/osf/__init__.py +++ b/python/src/ouster/osf/__init__.py @@ -1,28 +1,5 @@ -""" -Copyright (c) 2021, Ouster, Inc. -All rights reserved. 
- -API to work with OSF files -""" # flake8: noqa (unused imports) -from ._osf import Reader -from ._osf import MessageRef -from ._osf import ChunkRef -from ._osf import MetadataStore -from ._osf import MetadataEntry -from ._osf import LidarSensor -from ._osf import Extrinsics -from ._osf import LidarScanStreamMeta -from ._osf import LidarScanStream -from ._osf import StreamStats -from ._osf import StreamingInfo -from ._osf import ChunksLayout -from ._osf import Writer - -from ._osf import slice_and_cast - -from .data import resolve_field_types - -from .data import Scans - +print("warning: the ouster.osf module has been moved to ouster.sdk.osf, " + "please use the new path to avoid this warning.") +from ouster.sdk.osf import * \ No newline at end of file diff --git a/python/src/ouster/osf/data.py b/python/src/ouster/osf/data.py deleted file mode 100644 index 81b69515..00000000 --- a/python/src/ouster/osf/data.py +++ /dev/null @@ -1,184 +0,0 @@ -from ouster import client -import ouster.osf as osf - -import numpy as np -from typing import cast, Iterator, Union, Tuple, List - - -class Scans(client.ScanSource): - """An iterable stream of ``LidarScan`` read from OSF file (for the first available sensor).""" - - def __init__(self, - osf_file: str, - *, - cycle: bool = False, - start_ts: int = 0, - sensor_id: int = 0): - """ - Args: - osf_file: OSF filename as scans source - cycle: repeat infinitely after iteration is finished is True - start_ts: return lidar scans starting from the specified start_ts - (in nanoseconds) - sensor_id: id of the sensor which LidarScan stream data to read - (i.e. id of the metadata entry with ``osf.LidarSensor`` type). - 0 (default) means that first LidarSensor from the OSF is used. - """ - self._reader = osf.Reader(osf_file) - self._cycle = cycle - self._start_ts = start_ts - self._sensor_id = sensor_id - - if self._sensor_id: - # sensor_id is passed so we can get the sensor metadata - # entry directly by metadata entry id - sensor_meta = self._reader.meta_store[self._sensor_id] - if sensor_meta and sensor_meta.of(osf.LidarSensor): - self._sensor = sensor_meta - else: - raise ValueError(f"Error: Sensor is not found by sensor_id: " - f" {self._sensor_id}") - else: - # sensor_id is not provided, so we get the first - # osf.LidarSensor metadata entry and use its stream - sensor_meta = self._reader.meta_store.get(osf.LidarSensor) - if not sensor_meta: - raise ValueError("Error: No sensors found in OSF file") - self._sensor = sensor_meta - - # check for Extrinsics - extrinsics = self._reader.meta_store.find(osf.Extrinsics) - for _, v in extrinsics.items(): - if v.ref_meta_id == self._sensor.id: - print(f"Found extrinsics for sensor[{self._sensor.id}]:\n", - v.extrinsics) - self._sensor.info.extrinsic = v.extrinsics - - # Find the corresponding stream_id for the sensor - scan_streams = self._reader.meta_store.find(osf.LidarScanStream) - self._sensor_stream_id = next((mid for mid, m in scan_streams.items() - if m.sensor_meta_id == self._sensor.id), - 0) - if not self._sensor_stream_id: - raise ValueError(f"Error: No LidarScan stream found for sensor" - f" id:{self._sensor.id} in an OSF file") - - def __iter__(self) -> Iterator[client.LidarScan]: - """Iterator that returns ``LidarScan`` objects.""" - for _, ls in self.withTs(): - yield ls - - def withTs(self) -> Iterator[Tuple[int, client.LidarScan]]: - """Iterator that returns tuple of (``ts``, ``LidarScan``) - - Where ``ts`` - is a timestamp (ns) of a ``LidarScan`` (usually as a - timestamp of a first packet in a 
``LidarScan``) - """ - while True: - # TODO[pb]: Read only specified _sensor_stream_id stream - for msg in self._reader.messages([self._sensor_stream_id], - self._start_ts, - self._reader.end_ts): - if msg.id == self._sensor_stream_id: - scan = msg.decode() - if scan: - yield msg.ts, cast(client.LidarScan, scan) - if not self._cycle: - break - - def close(self) -> None: - # TODO[pb]: Do the close for Reader? - pass - - @property - def metadata(self) -> client.SensorInfo: - """Return metadata of a Lidar Sensor used.""" - return self._sensor.info - - -def resolve_field_types( - metadata: Union[client.SensorInfo, List[client.SensorInfo]], - flags: bool = False, - raw_headers: bool = False, - raw_fields: bool = False -) -> Union[client.FieldTypes, List[client.FieldTypes]]: - """Resolving optimal field types for OSF LidarScanStream encoder - - Shrinks the sizes of the LEGACY UDPLidarProfile fields and extends with - FLAGS/FLAGS2 if `flags=True`. - - Args: - metadata: single SensorInfo or a list of SensorInfo used resolve - UDPLidarProfile - flags: True if augment the resulting fields with FLAGS/FLAGS2 - raw_headers: True if RAW_HEADERS field should be included (i.e. all - lidar packet headers and footers will be added during - batching) - raw_fields: True if RAW32_WORDx fields should be included - - Returns: - field types of a typical LidarScan with a requested optional fields. - """ - - single_result = False - if not isinstance(metadata, list): - metadata = [metadata] - single_result = True - - field_types = [] - - for i, m in enumerate(metadata): - ftypes = client.get_field_types(m) - profile = m.format.udp_profile_lidar - - # HACK: Overwrite fields to reduced datatypes for LEGACY (saves ~15% of - # space in a file) - if profile == client.UDPProfileLidar.PROFILE_LIDAR_LEGACY: - ftypes.update( - dict({ - client.ChanField.RANGE: np.uint32, - client.ChanField.SIGNAL: np.uint16, - client.ChanField.REFLECTIVITY: np.uint16, - client.ChanField.NEAR_IR: np.uint16 - })) - - if flags: - ftypes.update({client.ChanField.FLAGS: np.uint8}) - if client.ChanField.RANGE2 in ftypes: - ftypes.update({client.ChanField.FLAGS2: np.uint8}) - - if raw_fields: - ftypes.update({client.ChanField.RAW32_WORD1: np.uint32}) - if profile != client.UDPProfileLidar.PROFILE_LIDAR_RNG15_RFL8_NIR8: - # not Low Bandwidth - ftypes.update( - {client.ChanField.RAW32_WORD2: np.uint32}) - ftypes.update( - {client.ChanField.RAW32_WORD3: np.uint32}) - if client.ChanField.RANGE2 in ftypes: - ftypes.update( - {client.ChanField.RAW32_WORD4: np.uint32}) - if profile == client.UDPProfileLidar.PROFILE_LIDAR_FIVE_WORD_PIXEL: - ftypes.update( - dict({ - client.ChanField.RAW32_WORD4: np.uint32, - client.ChanField.RAW32_WORD5: np.uint32 - })) - - if raw_headers: - # getting the optimal field type for RAW_HEADERS - pf = client._client.PacketFormat.from_info(m) - h = pf.pixels_per_column - raw_headers_space = (pf.packet_header_size + - pf.packet_footer_size + pf.col_header_size + - pf.col_footer_size) - dtype = [ - np.uint8, - np.uint16, - np.uint32 - ][int(raw_headers_space / h)] - ftypes.update({client.ChanField.RAW_HEADERS: dtype}) # type: ignore - - field_types.append(ftypes) - - return field_types[0] if single_result else field_types diff --git a/python/src/ouster/pcap/__init__.py b/python/src/ouster/pcap/__init__.py index e7511254..a53a354e 100644 --- a/python/src/ouster/pcap/__init__.py +++ b/python/src/ouster/pcap/__init__.py @@ -1,12 +1,5 @@ -""" -Copyright (c) 2021, Ouster, Inc. -All rights reserved. 
+# flake8: noqa (unused imports) -Pcap tools to record/read/write Ouster sensor data.""" -# flake8: noqa: F401 (unused imports) - -from .pcap import Pcap -from .pcap import record -from .pcap import _guess_ports -from .pcap import _packet_info_stream -from .pcap import _replay +print("warning: the ouster.pcap module has been moved to ouster.sdk.pcap, " + "please use the new path to avoid this warning.") +from ouster.sdk.pcap import * diff --git a/python/src/ouster/sdk/__init__.py b/python/src/ouster/sdk/__init__.py index e69de29b..9e64bf68 100644 --- a/python/src/ouster/sdk/__init__.py +++ b/python/src/ouster/sdk/__init__.py @@ -0,0 +1,9 @@ +""" +Copyright (c) 2024, Ouster, Inc. +All rights reserved. + +Ouster-SDL +""" +# flake8: noqa (unused imports) + +from .open_source import open_source \ No newline at end of file diff --git a/python/src/ouster/sdk/bag/__init__.py b/python/src/ouster/sdk/bag/__init__.py new file mode 100644 index 00000000..bcfc8bea --- /dev/null +++ b/python/src/ouster/sdk/bag/__init__.py @@ -0,0 +1,10 @@ +""" +Copyright (c) 2024, Ouster, Inc. +All rights reserved. + +Ouster Bag file support +""" +# flake8: noqa (unused imports) + +from .bag import BagSource # type: ignore +from .bag import PacketMsg # type: ignore \ No newline at end of file diff --git a/python/src/ouster/sdkx/bag.py b/python/src/ouster/sdk/bag/bag.py similarity index 99% rename from python/src/ouster/sdkx/bag.py rename to python/src/ouster/sdk/bag/bag.py index 7a8544a8..9849714e 100644 --- a/python/src/ouster/sdkx/bag.py +++ b/python/src/ouster/sdk/bag/bag.py @@ -6,7 +6,7 @@ import genpy import collections -from ouster import client +from ouster.sdk import client import time diff --git a/python/src/ouster/client/py.typed b/python/src/ouster/sdk/bag/py.typed similarity index 100% rename from python/src/ouster/client/py.typed rename to python/src/ouster/sdk/bag/py.typed diff --git a/python/src/ouster/sdk/client/__init__.py b/python/src/ouster/sdk/client/__init__.py new file mode 100644 index 00000000..138836ee --- /dev/null +++ b/python/src/ouster/sdk/client/__init__.py @@ -0,0 +1,73 @@ +""" +Copyright (c) 2021, Ouster, Inc. +All rights reserved. 
+ +Python sensor client +""" +# flake8: noqa (unused imports) + +from ._client import SensorInfo +from ._client import DataFormat +from ._client import LidarMode +from ._client import TimestampMode +from ._client import OperatingMode +from ._client import MultipurposeIOMode +from ._client import Polarity +from ._client import NMEABaudRate +from ._client import ChanField +from ._client import UDPProfileLidar +from ._client import UDPProfileIMU +from ._client import SensorConfig +from ._client import SensorCalibration +from ._client import ShotLimitingStatus +from ._client import ThermalShutdownStatus +from ._client import FullScaleRange +from ._client import ReturnOrder +from ._client import init_logger +from ._client import convert_to_legacy +from ._client import get_config +from ._client import set_config +from ._client import LidarScan +from ._client import get_field_types +from ._client import _Packet +from ._client import _LidarPacket +from ._client import _ImuPacket + +from .data import BufferT +from .data import FieldDType +from .data import FieldTypes +from .data import Packet +from .data import ImuPacket +from .data import LidarPacket +from .data import LidarPacketValidator +from .data import ColHeader +from .data import XYZLut +from .data import destagger +from .data import PacketValidationFailure, PacketIdError, PacketSizeError +from .data import packet_ts + +from .scan_source import ScanSource +from .multi_scan_source import MultiScanSource +from .scan_source_adapter import ScanSourceAdapter + +from .core import ClientError +from .core import ClientTimeout +from .core import ClientOverflow +from .core import PacketSource +from .core import Packets +from .core import Sensor +from .core import Scans +from .core import FrameBorder +from .core import first_valid_column +from .core import last_valid_column +from .core import first_valid_column_ts +from .core import first_valid_packet_ts +from .core import last_valid_column_ts +from .core import first_valid_column_pose +from .core import last_valid_column_pose +from .core import valid_packet_idxs +from .core import poses_present + +from .multi import PacketMultiSource # type: ignore +from .multi import PacketMultiWrapper # type: ignore +from .multi import ScansMulti # type: ignore \ No newline at end of file diff --git a/python/src/ouster/client/_client.pyi b/python/src/ouster/sdk/client/_client.pyi similarity index 84% rename from python/src/ouster/client/_client.pyi rename to python/src/ouster/sdk/client/_client.pyi index bd0b1202..a73309e7 100644 --- a/python/src/ouster/client/_client.pyi +++ b/python/src/ouster/sdk/client/_client.pyi @@ -40,47 +40,94 @@ class _ImuPacket(_Packet): pass -class Client: +class Event: + source: int + state: ClientState + + def __init__(self, source: int, state: ClientState) -> None: + ... + + +class SensorConnection: @overload def __init__(self, hostname: str = ..., lidar_port: int = ..., - imu_port: int = ..., - capacity: int = ...) -> None: + imu_port: int = ...) -> None: ... @overload def __init__(self, - hostname: str, - udp_dest_host: str, + hostname: str = ..., + udp_dest_host: str = ..., mode: LidarMode = ..., timestamp_mode: TimestampMode = ..., lidar_port: int = ..., imu_port: int = ..., timeout_sec: int = ..., - capacity: int = ...) -> None: + persist_config: bool = ...) -> None: + ... + + def poll(self, timeout_sec: int) -> ClientState: + ... + + def read_lidar_packet(self, packet: _LidarPacket, pf: PacketFormat) -> bool: + ... 
+ + def read_imu_packet(self, packet: _ImuPacket, pf: PacketFormat) -> bool: + ... + + @property + def lidar_port(self) -> int: ... - def get_metadata(self, timeout_sec: int = ..., legacy: bool = ...) -> str: + @property + def imu_port(self) -> int: + ... + + def get_metadata(self, timeout_sec: int, legacy: bool) -> str: ... def shutdown(self) -> None: ... - def consume(self, - lidarp: _LidarPacket, - imup: _ImuPacket, - timeout_sec: float) -> ClientState: + +class UDPPacketSource: + def __init__(self) -> None: ... - def produce(self, pf: PacketFormat) -> None: + @overload + def add_client(self, + connection: SensorConnection, + lidar_buf_size: int, + lidar_packet_size: int, + imu_buf_size: int, + imu_packet_size: int) -> None: ... - def flush(self, n_packets: int = ...) -> None: + @overload + def add_client(self, + connection: SensorConnection, + metadata: SensorInfo, + seconds_to_buffer: float) -> None: ... - @property - def capacity(self) -> int: + def shutdown(self) -> None: + ... + + def pop(self, timeout_sec: float) -> Event: + ... + + def packet(self, e: Event) -> _Packet: + ... + + def advance(self, e: Event) -> None: + ... + + def produce(self) -> None: + ... + + def flush(self) -> None: ... @property @@ -88,11 +135,57 @@ class Client: ... @property - def lidar_port(self) -> int: + def capacity(self) -> int: + ... + + +class Client: + @overload + def __init__(self, + connection: SensorConnection, + lidar_buf_size: int, + lidar_packet_size: int, + imu_buf_size: int, + imu_packet_size: int) -> None: + ... + + @overload + def __init__(self, + connection: SensorConnection, + metadata: SensorInfo, + seconds_to_buffer: float) -> None: + ... + + def shutdown(self) -> None: + ... + + def pop(self, timeout_sec: float) -> ClientState: + ... + + def packet(self, st: ClientState) -> _Packet: + ... + + def advance(self, st: ClientState) -> None: + ... + + def consume(self, + lidarp: _LidarPacket, + imup: _ImuPacket, + timeout_sec: float) -> ClientState: + ... + + def produce(self) -> None: + ... + + def flush(self) -> None: ... @property - def imu_port(self) -> int: + def size(self) -> int: + ... + + @property + def capacity(self) -> int: ... @@ -230,6 +323,10 @@ class PacketFormat: def packet_footer_size(self) -> int: ... + @property + def max_frame_id(self) -> int: + ... + def packet_type(self, buf: BufferT) -> int: ... @@ -338,7 +435,7 @@ class PacketWriter(PacketFormat): ... -def scan_to_packets(ls: LidarScan, pw: PacketWriter) -> List[_LidarPacket]: +def scan_to_packets(ls: LidarScan, pw: PacketWriter, init_id: int, prod_sn: int) -> List[_LidarPacket]: ... @@ -491,6 +588,59 @@ class Polarity: ... +class FullScaleRange: + FSR_NORMAL: ClassVar[FullScaleRange] + FSR_EXTENDED: ClassVar[FullScaleRange] + + __members__: ClassVar[Dict[str, FullScaleRange]] + values: ClassVar[Iterator[FullScaleRange]] + + def __init__(self, code: int) -> None: + ... + + def __int__(self) -> int: + ... + + @property + def name(self) -> str: + ... + + @property + def value(self) -> int: + ... + + @classmethod + def from_string(cls, s: str) -> FullScaleRange: + ... + + +class ReturnOrder: + ORDER_STRONGEST_TO_WEAKEST: ClassVar[ReturnOrder] + ORDER_FARTHEST_TO_NEAREST: ClassVar[ReturnOrder] + ORDER_NEAREST_TO_FARTHEST: ClassVar[ReturnOrder] + + __members__: ClassVar[Dict[str, ReturnOrder]] + values: ClassVar[Iterator[ReturnOrder]] + + def __init__(self, code: int) -> None: + ... + + def __int__(self) -> int: + ... + + @property + def name(self) -> str: + ... + + @property + def value(self) -> int: + ... 
+ + @classmethod + def from_string(cls, s: str) -> ReturnOrder: + ... + + class NMEABaudRate: BAUD_9600: ClassVar[NMEABaudRate] BAUD_115200: ClassVar[NMEABaudRate] @@ -712,6 +862,10 @@ class SensorConfig: columns_per_packet: Optional[int] udp_profile_lidar: Optional[UDPProfileLidar] udp_profile_imu: Optional[UDPProfileIMU] + min_range_threshold_cm: Optional[int] + gyro_fsr: Optional[FullScaleRange] + accel_fsr: Optional[FullScaleRange] + return_order: Optional[ReturnOrder] @overload def __init__(self) -> None: @@ -768,7 +922,7 @@ class LidarScan: frame_status: int @overload - def __init__(self, w: int, h: int) -> None: + def __init__(self, h: int, w: int) -> None: ... @overload @@ -785,7 +939,15 @@ class LidarScan: ... @overload - def __init__(self, w: int, h: int, fields: Dict[ChanField, FieldDType], columns_per_packet: int) -> None: + def __init__(self, h: int, w: int, fields: Dict[ChanField, FieldDType], columns_per_packet: int) -> None: + ... + + @overload + def __init__(self, scan: LidarScan) -> None: + ... + + @overload + def __init__(self, scan: LidarScan, fields: Dict[ChanField, FieldDType]) -> None: ... @property @@ -833,13 +995,6 @@ class LidarScan: def fields(self) -> Iterator[ChanField]: ... - def to_native(self) -> LidarScan: - ... - - @classmethod - def from_native(cls, scan: LidarScan) -> LidarScan: - ... - def destagger_int8(field: ndarray, shifts: List[int], inverse: bool) -> ndarray: diff --git a/python/src/ouster/client/_digest.py b/python/src/ouster/sdk/client/_digest.py similarity index 86% rename from python/src/ouster/client/_digest.py rename to python/src/ouster/sdk/client/_digest.py index 10223150..61e37b79 100644 --- a/python/src/ouster/client/_digest.py +++ b/python/src/ouster/sdk/client/_digest.py @@ -18,6 +18,27 @@ from .core import (Packets, PacketSource, Scans) +# NOTE[pb]: Extracted from LidarPacket for keeping tests based on digests working +# this method is deprecated along with ColHeader and should be cleaned +# fully eventually. +def _get_packet_header(packet: LidarPacket, header: ColHeader) -> np.ndarray: + """Create a view of the specified column header. + + This method is deprecated. Use the ``timestamp``, ``measurement_id`` or + ``status`` properties instead. + + Args: + header: The column header to parse + + Returns: + A numpy array containing a copy of the specified header values + """ + + res = packet._pf.packet_header(header, packet._data) + res.flags.writeable = False + return res + + def _md5(a: np.ndarray) -> str: """Get md5 hash of a numpy array.""" return hashlib.md5(a.tobytes()).hexdigest() @@ -59,7 +80,7 @@ def from_packets(cls, packets: Iterable[LidarPacket]) -> 'FieldDigest': for idx, packet in enumerate(packets): # TODO: add packet headers for h in ColHeader: - hashes[h.name].update(packet.header(h).tobytes()) + hashes[h.name].update(_get_packet_header(packet, h).tobytes()) for f in packet.fields: hashes[f.name].update(packet.field(f).tobytes()) diff --git a/python/src/ouster/client/_utils/__init__.py b/python/src/ouster/sdk/client/_utils/__init__.py similarity index 100% rename from python/src/ouster/client/_utils/__init__.py rename to python/src/ouster/sdk/client/_utils/__init__.py diff --git a/python/src/ouster/client/core.py b/python/src/ouster/sdk/client/core.py similarity index 77% rename from python/src/ouster/client/core.py rename to python/src/ouster/sdk/client/core.py index f5d65395..0bf1f16d 100644 --- a/python/src/ouster/client/core.py +++ b/python/src/ouster/sdk/client/core.py @@ -7,19 +7,24 @@ generated using pybind11. 
""" from contextlib import closing -from typing import cast, Dict, Iterable, Iterator, List, Optional, Tuple, Union +from typing import (cast, Dict, Iterable, Iterator, List, Optional, Tuple, + Union, Callable) from threading import Thread import time from math import ceil +import numpy as np from more_itertools import take from typing_extensions import Protocol -from . import _client -from ._client import (SensorInfo, LidarScan, UDPProfileLidar) -from .data import (ChanField, FieldDType, ImuPacket, LidarPacket, Packet, - PacketIdError) -import numpy as np +from ._client import (SensorInfo, SensorConnection, Client, ClientState, + PacketFormat, LidarScan, ScanBatcher, get_field_types) + +from .data import (ChanField, FieldDType, ImuPacket, + LidarPacket, Packet, PacketIdError, + FieldTypes) + +from .scan_source import ScanSource class ClientError(Exception): @@ -57,25 +62,8 @@ def close(self) -> None: """Release the underlying resource, if any.""" ... - -class ScanSource(Protocol): - """Represents a single-sensor data stream.""" - - def __iter__(self) -> Iterator[LidarScan]: - """A ScanSource supports ``Iterable[LidarScan]``. - - Currently defined explicitly due to: - https://github.com/python/typing/issues/561 - """ - ... - @property - def metadata(self) -> SensorInfo: - """Metadata associated with the scan stream.""" - ... - - def close(self) -> None: - """Release the underlying resource, if any.""" + def is_live(self): ... @@ -105,6 +93,10 @@ def __iter__(self) -> Iterator[Packet]: def close(self) -> None: pass + @property + def is_live(self) -> bool: + return False + class Sensor(PacketSource): """A packet source listening on local UDP ports. @@ -117,14 +109,17 @@ class Sensor(PacketSource): thread (like any other non-daemonized Python thread). """ - _cli: _client.Client + _connection: SensorConnection + _cli: Client _timeout: Optional[float] _metadata: SensorInfo - _pf: _client.PacketFormat + _pf: PacketFormat _producer: Thread - _cache: Optional[_client.ClientState] + _cache: Optional[ClientState] _lidarbuf: LidarPacket _imubuf: ImuPacket + _lidar_port: int + _hostname: str def __init__(self, hostname: str, @@ -138,7 +133,7 @@ def __init__(self, _flush_before_read: bool = True, _flush_frames: int = 5, _legacy_format: bool = False, - _soft_id_check: bool = False, + soft_id_check: bool = False, _skip_metadata_beam_validation: bool = False) -> None: """ Neither the ports nor udp destination configuration on the sensor will @@ -154,15 +149,17 @@ def __init__(self, timeout: seconds to wait for packets before signaling error or None _overflow_err: if True, raise ClientOverflow _flush_before_read: if True, try to clear buffers before reading + _flush_frames: the number of frames to skip/flush on start of a new iter _legacy_format: if True, use legacy metadata format - _soft_id_check: if True, don't skip lidar packets buffers on, + soft_id_check: if True, don't skip lidar packets buffers on, id mismatch (init_id/sn pair), _skip_metadata_beam_validation: if True, skip metadata beam angle check Raises: ClientError: If initializing the client fails. 
""" - self._cli = _client.Client(hostname, lidar_port, imu_port, buf_size) + self._connection = SensorConnection( + hostname, lidar_port, imu_port) self._timeout = timeout self._overflow_err = _overflow_err self._flush_before_read = _flush_before_read @@ -171,34 +168,52 @@ def __init__(self, self._flush_frames = _flush_frames self._legacy_format = _legacy_format - self._soft_id_check = _soft_id_check + self._soft_id_check = soft_id_check self._id_error_count = 0 self._skip_metadata_beam_validation = _skip_metadata_beam_validation + self._hostname = hostname + self._lidar_port = lidar_port + # Fetch from sensor if not explicitly provided if metadata: self._metadata = metadata else: self._fetch_metadata() - self._metadata = SensorInfo(self._fetched_meta, self._skip_metadata_beam_validation) - self._pf = _client.PacketFormat.from_info(self._metadata) + self._metadata = SensorInfo( + self._fetched_meta, self._skip_metadata_beam_validation) + self._pf = PacketFormat.from_info(self._metadata) + self._cli = Client(self._connection, buf_size, + self._pf.lidar_packet_size, buf_size, + self._pf.imu_packet_size) self._lidarbuf = LidarPacket(None, self._metadata, _raise_on_id_check=not self._soft_id_check) self._imubuf = ImuPacket(packet_format=self._pf) # Use args to avoid capturing self causing circular reference - self._producer = Thread(target=lambda cli, pf: cli.produce(pf), - args=(self._cli, self._pf)) + self._producer = Thread(target=self._cli.produce) self._producer.start() + @property + def is_live(self) -> bool: + return True + + @property + def lidar_port(self) -> int: + return self._connection.lidar_port + + @property + def imu_port(self) -> int: + return self._connection.imu_port + def _fetch_metadata(self, timeout: Optional[float] = None) -> None: timeout_sec = 45 if timeout: timeout_sec = ceil(timeout) if not self._fetched_meta: - self._fetched_meta = self._cli.get_metadata( - legacy=self._legacy_format, timeout_sec = timeout_sec) + self._fetched_meta = self._connection.get_metadata( + legacy=self._legacy_format, timeout_sec=timeout_sec) if not self._fetched_meta: raise ClientError("Failed to collect metadata") @@ -220,24 +235,34 @@ def _next_packet(self) -> Optional[Packet]: st = self._peek() self._cache = None - if self._overflow_err and st & _client.ClientState.OVERFLOW: - raise ClientOverflow() - if st & _client.ClientState.LIDAR_DATA: - if self._lidarbuf.id_error: + # TODO: revise this part and upper loop to eliminate ValueError + if st & ClientState.OVERFLOW: + if self._overflow_err: + raise ClientOverflow("client packets overflow") + else: + raise ValueError() + if st & ClientState.LIDAR_DATA: + packet = LidarPacket(self._lidarbuf._data, self._metadata, + self._lidarbuf.capture_timestamp, + _raise_on_id_check = not self._soft_id_check) + if packet.id_error: self._id_error_count += 1 - return self._lidarbuf - elif st & _client.ClientState.IMU_DATA: + return packet + elif st & ClientState.IMU_DATA: return self._imubuf - elif st == _client.ClientState.TIMEOUT: - raise ClientTimeout(f"No packets received within {self._timeout}s") - elif st & _client.ClientState.ERROR: + elif st == ClientState.TIMEOUT: + raise ClientTimeout(f"No packets received within {self._timeout}s from sensor " + f"{self._hostname} using udp destination {self._metadata.config.udp_dest} " + f"on port {self._lidar_port}. 
Check your firewall settings and/or ensure " + f"that the lidar port {self._lidar_port} is not being held open.") + elif st & ClientState.ERROR: raise ClientError("Client returned ERROR state") - elif st & _client.ClientState.EXIT: + elif st & ClientState.EXIT: return None raise AssertionError("Should be unreachable") - def _peek(self) -> _client.ClientState: + def _peek(self) -> ClientState: if self._cache is None: st = self._cli.consume(self._lidarbuf, self._imubuf, @@ -311,7 +336,7 @@ def flush(self, n_frames: int = 3, *, full=False) -> int: while True: # check next packet to see if it's the start of a new frame st = self._peek() - if st & _client.ClientState.LIDAR_DATA: + if st & ClientState.LIDAR_DATA: frame = self._pf.frame_id(self._lidarbuf._data) if frame != last_frame: last_frame = frame @@ -319,9 +344,9 @@ def flush(self, n_frames: int = 3, *, full=False) -> int: if n_frames < 0: break last_ts = time.monotonic() - elif st & _client.ClientState.ERROR: + elif st & ClientState.ERROR: raise ClientError("Client returned ERROR state") - elif st & _client.ClientState.EXIT: + elif st & ClientState.EXIT: break # check for timeout @@ -336,6 +361,7 @@ def flush(self, n_frames: int = 3, *, full=False) -> int: return n_dropped + @property def buf_use(self) -> int: return self._cli.size @@ -352,6 +378,8 @@ def close(self) -> None: self._cli.shutdown() if hasattr(self, '_producer'): self._producer.join() + if hasattr(self, '_connection'): + self._connection.shutdown() def __del__(self) -> None: self.close() @@ -389,12 +417,11 @@ def __init__(self, self._source = source self._complete = complete self._timeout = timeout - self._timed_out = False self._max_latency = _max_latency # used to initialize LidarScan - self._fields: Union[Dict[ChanField, FieldDType], UDPProfileLidar] = ( + self._fields: FieldTypes = ( fields if fields is not None else - self._source.metadata.format.udp_profile_lidar) + get_field_types(self._source.metadata.format.udp_profile_lidar)) def __iter__(self) -> Iterator[LidarScan]: """Get an iterator.""" @@ -410,8 +437,8 @@ def __iter__(self) -> Iterator[LidarScan]: self._source, Sensor) else None ls_write = None - pf = _client.PacketFormat.from_info(self._source.metadata) - batch = _client.ScanBatcher(w, pf) + pf = PacketFormat.from_info(self._source.metadata) + batch = ScanBatcher(w, pf) # Time from which to measure timeout start_ts = time.monotonic() @@ -428,17 +455,14 @@ def __iter__(self) -> Iterator[LidarScan]: if not self._complete or ls_write.complete(column_window): yield ls_write return - except ClientTimeout: - self._timed_out = True - return if self._timeout is not None and (time.monotonic() >= start_ts + self._timeout): - self._timed_out = True - return + raise ClientTimeout(f"No valid frames received within {self._timeout}s") if isinstance(packet, LidarPacket): - ls_write = ls_write or LidarScan(h, w, self._fields, columns_per_packet) + ls_write = ls_write or LidarScan( + h, w, self._fields, columns_per_packet) if batch(packet, ls_write): # Got a new frame, return it and start another @@ -451,12 +475,12 @@ def __iter__(self) -> Iterator[LidarScan]: # Drop data along frame boundaries to maintain _max_latency and # clear out already-batched first packet of next frame if self._max_latency and sensor is not None: - buf_frames = sensor.buf_use() // packets_per_frame + buf_frames = sensor.buf_use // packets_per_frame drop_frames = buf_frames - self._max_latency + 1 if drop_frames > 0: sensor.flush(drop_frames) - batch = _client.ScanBatcher(w, pf) + batch = ScanBatcher(w, 
pf) def close(self) -> None: """Close the underlying PacketSource.""" @@ -467,6 +491,41 @@ def metadata(self) -> SensorInfo: """Return metadata from the underlying PacketSource.""" return self._source.metadata + @property + def is_live(self) -> bool: + return self._source.is_live + + @property + def is_seekable(self) -> bool: + return False + + @property + def is_indexed(self) -> bool: + return False + + @property + def fields(self) -> FieldTypes: + return self._fields + + @property + def scans_num(self) -> Optional[int]: + return None + + def __len__(self) -> int: + raise TypeError("len is not supported on live or non-indexed sources") + + def _seek(self, int) -> None: + raise RuntimeError( + "can not invoke __getitem__ on non-indexed source") + + def __getitem__(self, key: Union[int, slice] + ) -> Union[Optional[LidarScan], List[Optional[LidarScan]]]: + raise RuntimeError( + "can not invoke __getitem__ on non-indexed source") + + def __del__(self) -> None: + pass + @classmethod def sample( cls, @@ -547,6 +606,29 @@ def stream( _max_latency=2) +class FrameBorder: + """Create callable helper that indicates the cross frames packets.""" + + def __init__(self, pred: Callable[[Packet], bool] = lambda _: True): + self._last_f_id = -1 + self._last_packet_ts = None + self._last_packet_res = False + self._pred = pred + + def __call__(self, packet: Packet) -> bool: + if isinstance(packet, LidarPacket): + # don't examine packets again + if (self._last_packet_ts and packet.capture_timestamp and + self._last_packet_ts == packet.capture_timestamp): + return self._last_packet_res + f_id = packet.frame_id + changed = (self._last_f_id != -1 and f_id != self._last_f_id) + self._last_packet_res = changed and self._pred(packet) + self._last_f_id = f_id + return self._last_packet_res + return False + + def first_valid_column(scan: LidarScan) -> int: """Return first valid column of a LidarScan""" return int(np.bitwise_and(scan.status, 1).argmax()) diff --git a/python/src/ouster/client/data.py b/python/src/ouster/sdk/client/data.py similarity index 89% rename from python/src/ouster/client/data.py rename to python/src/ouster/sdk/client/data.py index f268a4f0..4109c7c7 100644 --- a/python/src/ouster/client/data.py +++ b/python/src/ouster/sdk/client/data.py @@ -6,12 +6,18 @@ from enum import Enum from typing import Callable, Iterator, Type, List, Optional, Union, Dict import logging -import warnings import numpy as np -from . 
import _client -from ._client import (ChanField, LidarScan, SensorInfo) +from ._client import (ChanField, LidarScan, SensorInfo, PacketFormat, + _ImuPacket, _LidarPacket) + +from ._client import (destagger_int8, destagger_int16, destagger_int32, + destagger_int64, destagger_uint8, destagger_uint16, + destagger_uint32, destagger_uint64, destagger_float, + destagger_double) + +from ._client import XYZLut as client_XYZLut BufferT = Union[bytes, bytearray, memoryview, np.ndarray] """Types that support the buffer protocol.""" @@ -25,19 +31,19 @@ FieldTypes = Dict[ChanField, FieldDType] """LidarScan chan fields with types""" -logger = logging.getLogger("ouster.client.data") +logger = logging.getLogger("ouster.sdk.client.data") -class ImuPacket(_client._ImuPacket): +class ImuPacket(_ImuPacket): """Read IMU Packet data from a buffer.""" - _pf: _client.PacketFormat + _pf: PacketFormat def __init__(self, data: Optional[BufferT] = None, info: Optional[SensorInfo] = None, timestamp: Optional[float] = None, *, - packet_format: Optional[_client.PacketFormat] = None) -> None: + packet_format: Optional[PacketFormat] = None) -> None: """ Args: data: Buffer containing the packet payload @@ -53,7 +59,7 @@ def __init__(self, elif info: # TODO: we should deprecate this, constructing a full PacketFormat # for every single packet seems like an antipattern -- Tim T. - self._pf = _client.PacketFormat.from_info(info) + self._pf = PacketFormat.from_info(info) else: raise ValueError("either packet_format or info should be specified") @@ -142,7 +148,7 @@ def __init__(self, metadata: SensorInfo, checks=['id_and_sn_valid', 'packet_size self._metadata = metadata self._metadata_init_id = metadata.init_id self._metadata_sn = int(metadata.sn) if metadata.sn else 0 - self._pf = _client.PacketFormat.from_info(metadata) + self._pf = PacketFormat.from_info(metadata) self._checks = [getattr(self, check) for check in checks] def check_packet(self, data: BufferT, n_bytes: int) -> List[PacketValidationFailure]: @@ -171,7 +177,7 @@ def packet_size_valid(self, data: BufferT, n_bytes: int) -> Optional[PacketValid return None -class LidarPacket(_client._LidarPacket): +class LidarPacket(_LidarPacket): """Read lidar packet data as numpy arrays. The dimensions of returned arrays depend on the sensor product line and @@ -179,7 +185,7 @@ class LidarPacket(_client._LidarPacket): configured ``columns_per_packet``, while measurement fields will be 2d arrays of size ``pixels_per_column`` by ``columns_per_packet``. """ - _pf: _client.PacketFormat + _pf: PacketFormat _metadata_init_id: int _metadata_sn: int @@ -188,7 +194,7 @@ def __init__(self, info: Optional[SensorInfo] = None, timestamp: Optional[float] = None, *, - packet_format: Optional[_client.PacketFormat] = None, + packet_format: Optional[PacketFormat] = None, _raise_on_id_check: bool = True) -> None: """ Args: @@ -207,7 +213,7 @@ def __init__(self, elif info: # TODO: we should deprecate this, constructing a full PacketFormat # for every single packet seems like an antipattern -- Tim T. - self._pf = _client.PacketFormat.from_info(info) + self._pf = PacketFormat.from_info(info) else: raise ValueError("either packet_format or info should be specified") @@ -304,24 +310,6 @@ def field(self, field: ChanField) -> np.ndarray: res.flags.writeable = False return res - def header(self, header: ColHeader) -> np.ndarray: - """Create a view of the specified column header. - - This method is deprecated. Use the ``timestamp``, ``measurement_id`` or - ``status`` properties instead. 
- - Args: - header: The column header to parse - - Returns: - A numpy array containing a copy of the specified header values - """ - warnings.warn("LidarPacket.header is deprecated", DeprecationWarning) - - res = self._pf.packet_header(header, self._data) - res.flags.writeable = False - return res - @property def timestamp(self) -> np.ndarray: """Parse the measurement block timestamps out of a packet buffer. @@ -359,16 +347,16 @@ def status(self) -> np.ndarray: def _destagger(field: np.ndarray, shifts: List[int], inverse: bool) -> np.ndarray: return { - np.dtype(np.int8): _client.destagger_int8, - np.dtype(np.int16): _client.destagger_int16, - np.dtype(np.int32): _client.destagger_int32, - np.dtype(np.int64): _client.destagger_int64, - np.dtype(np.uint8): _client.destagger_uint8, - np.dtype(np.uint16): _client.destagger_uint16, - np.dtype(np.uint32): _client.destagger_uint32, - np.dtype(np.uint64): _client.destagger_uint64, - np.dtype(np.single): _client.destagger_float, - np.dtype(np.double): _client.destagger_double, + np.dtype(np.int8): destagger_int8, + np.dtype(np.int16): destagger_int16, + np.dtype(np.int32): destagger_int32, + np.dtype(np.int64): destagger_int64, + np.dtype(np.uint8): destagger_uint8, + np.dtype(np.uint16): destagger_uint16, + np.dtype(np.uint32): destagger_uint32, + np.dtype(np.uint64): destagger_uint64, + np.dtype(np.single): destagger_float, + np.dtype(np.double): destagger_double, }[field.dtype](field, shifts, inverse) @@ -439,7 +427,7 @@ def XYZLut( Returns: A function that computes a point cloud given a range image """ - lut = _client.XYZLut(info, use_extrinsics) + lut = client_XYZLut(info, use_extrinsics) def res(ls: Union[LidarScan, np.ndarray]) -> np.ndarray: if isinstance(ls, LidarScan): diff --git a/python/src/ouster/sdk/client/multi.py b/python/src/ouster/sdk/client/multi.py new file mode 100644 index 00000000..15325985 --- /dev/null +++ b/python/src/ouster/sdk/client/multi.py @@ -0,0 +1,304 @@ +# type: ignore +from typing_extensions import Protocol +from typing import Any, Tuple, List, Union, Optional, Iterator, Callable + +import copy + +from ._client import (SensorInfo, LidarScan, PacketFormat, ScanBatcher, + get_field_types) +from .data import Packet, ImuPacket, LidarPacket, packet_ts, FieldTypes +from .core import PacketSource, first_valid_packet_ts +from .scan_source import ScanSource +from .multi_scan_source import MultiScanSource + + +def collate_scans( + source: Iterator[Tuple[int, Any]], + sensors_count: int, + get_ts: Callable[[Any], int], + *, + dt: int = 10**8 +) -> Iterator[List[Optional[Any]]]: + """Collate by sensor idx with a cut every `dt` (ns) time length. + + Assuming that multi sensor packets stream are PTP synced, so the sensor + time of LidarScans don't have huge deltas in time, though some latency + of packets receiving (up to dt) should be ok. + + Args: + source: data stream with scans + sensors_count: number of sensors generating the stream of scans + dt: max time difference between scans in the collated scan (i.e. 
+ time period at which every new collated scan is released/cut), + default is 0.1 s + Returns: + List of LidarScans elements + """ + min_ts = -1 + max_ts = -1 + collated = [None] * sensors_count + for idx, m in source: + ts = get_ts(m) + if min_ts < 0 or max_ts < 0 or ( + ts >= min_ts + dt or ts < max_ts - dt): + if any(collated): + # process collated (reached dt boundary, if used) + yield collated # type: ignore + collated = [None] * sensors_count + + min_ts = max_ts = ts + + if collated[idx]: + # process collated (reached the existing scan) + yield collated + collated = [None] * sensors_count + min_ts = max_ts = ts + + collated[idx] = m # type: ignore + + if ts < min_ts: + min_ts = ts + + if ts > max_ts: + max_ts = ts + + # process the last one + if any(collated): + # process collated (the very last one, if any) + yield collated # type: ignore + + +class PacketMultiSource(Protocol): + """Represents a multi-sensor data stream.""" + + def __iter__(self) -> Iterator[Tuple[int, Packet]]: + """A PacketSource supports ``Iterable[Tuple[int, Packet]]``. + + Currently defined explicitly due to: + https://github.com/python/typing/issues/561 + """ + ... + + @property + def metadata(self) -> List[SensorInfo]: + """Metadata associated with the packet streams.""" + ... + + @property + def is_live(self) -> bool: + ... + + @property + def is_seekable(self) -> bool: + ... + + @property + def is_indexed(self) -> bool: + ... + + def restart(self) -> None: + """Restart playback, only relevant to non-live sources""" + ... + + def close(self) -> None: + """Release the underlying resources, if any.""" + ... + + +# TODO: schedule for removal +class PacketMultiWrapper(PacketMultiSource): + """Wrap PacketSource to the PacketMultiSource interface""" + + def __init__(self, + source: Union[PacketSource, PacketMultiSource]) -> None: + self._source = source + + def __iter__(self) -> Iterator[Tuple[int, Packet]]: + for p in self._source: + yield (0, p) if isinstance(p, (LidarPacket, + ImuPacket)) else p + + @property + def metadata(self) -> List[SensorInfo]: + """Metadata associated with the packet streams.""" + meta = self._source.metadata + return [meta] if isinstance(meta, SensorInfo) else meta + + def close(self) -> None: + """Release the underlying resource, if any.""" + self._source.close() + + @property + def buf_use(self) -> int: + if hasattr(self._source, "buf_use"): + return self._source.buf_use + else: + return -1 + + +class ScansMulti(MultiScanSource): + """Multi LidarScan source.""" + + def __init__( + self, + source: PacketMultiSource, + *, + dt: int = 10**8, + complete: bool = False, + cycle: bool = False, + fields: Optional[List[FieldTypes]] = None, + **_ + ) -> None: + """ + Args: + source: packet multi source + dt: max time difference between scans in the collated scan (i.e. + time period at which every new collated scan is released/cut), + default is 0.1s + complete: set to True to only release complete scans + cycle: repeat infinitely after iteration is finished is True. + in case source refers to a live sensor then this parameter + has no effect. + fields: specify which channel fields to populate on LidarScans + """ + self._source = source + self._dt = dt + self._complete = complete + self._cycle = cycle + # NOTE[self]: this fields override property may need to double checked + # for example, what would happen if the length of override doesn't + # match with the actual underlying metadata size. Is this a supported + # behavior? For now throwing an error if they don't match in size. 
+ file_fields = [get_field_types( + sinfo) for sinfo in self._source.metadata] + if fields: + if len(fields) != len(file_fields): + raise ValueError("Size of Field override doens't match") + self._fields = fields + else: + self._fields = file_fields + + @property + def sensors_count(self) -> int: + return len(self._source.metadata) + + @property + def metadata(self) -> List[SensorInfo]: + return self._source.metadata + + @property + def is_live(self) -> bool: + return self._source.is_live + + @property + def is_seekable(self) -> bool: + return self._source.is_seekable + + @property + def is_indexed(self) -> bool: + return self._source.is_indexed + + @property + def fields(self) -> List[FieldTypes]: + return self._fields + + @property + def scans_num(self) -> List[Optional[int]]: + if self.is_live or not self.is_indexed: + return [None] * self.sensors_count + raise NotImplementedError + + def __len__(self) -> int: + if self.is_live or not self.is_indexed: + raise TypeError("len is not supported on unindexed or live sources") + raise NotImplementedError + + def __iter__(self) -> Iterator[List[Optional[LidarScan]]]: + return collate_scans(self._scans_iter(True, self._cycle, True), self.sensors_count, + first_valid_packet_ts, + dt=self._dt) + + def _scans_iter(self, restart=True, cycle=False, deep_copy=False + ) -> Iterator[Tuple[int, LidarScan]]: + """ + Parameters: + restart: restart source from beginning if applicable + cycle: when reaching end auto restart + deep_copy: perform deepcopy when yielding scans + """ + w = [int] * self.sensors_count + h = [int] * self.sensors_count + col_window = [int] * self.sensors_count + columns_per_packet = [int] * self.sensors_count + pf = [None] * self.sensors_count + ls_write = [None] * self.sensors_count + batch = [None] * self.sensors_count + + for i, sinfo in enumerate(self.metadata): + w[i] = sinfo.format.columns_per_frame + h[i] = sinfo.format.pixels_per_column + col_window[i] = sinfo.format.column_window + columns_per_packet[i] = sinfo.format.columns_per_packet + pf[i] = PacketFormat.from_info(sinfo) + batch[i] = ScanBatcher(w[i], pf[i]) + + # autopep8: off + scan_shallow_yield = lambda x: x + scan_deep_yield = lambda x: copy.deepcopy(x) + scan_yield_op = scan_deep_yield if deep_copy else scan_shallow_yield + # autopep8: on + + if restart: + self._source.restart() # start from the beginning + while True: + had_message = False + for idx, packet in self._source: + if isinstance(packet, LidarPacket): + ls_write[idx] = ls_write[idx] or LidarScan( + h[idx], w[idx], self._fields[idx], columns_per_packet[idx]) + if batch[idx](packet._data, packet_ts(packet), ls_write[idx]): + if not self._complete or ls_write[idx].complete(col_window[idx]): + had_message = True + yield idx, scan_yield_op(ls_write[idx]) + + # return the last not fully cut scans in the sensor timestamp order if + # they satisfy the completeness criteria + last_scans = sorted( + [(idx, ls) for idx, ls in enumerate(ls_write) if ls is not None], + key=lambda si: first_valid_packet_ts(si[1])) + while last_scans: + idx, ls = last_scans.pop(0) + if not self._complete or ls.complete(col_window[idx]): + had_message = True + yield idx, scan_yield_op(ls) + + # exit if we had no scans so we dont infinite loop when cycling + if cycle and had_message: + self._source.restart() + else: + break + + def _seek(self, offset: int) -> None: + if not self.is_seekable: + raise RuntimeError("can not invoke _seek on non-seekable source") + self._source.seek(offset) + + def __getitem__(self, key: Union[int, slice] + 
) -> Union[List[Optional[LidarScan]], List[List[Optional[LidarScan]]]]: + + if not self.is_indexed: + raise RuntimeError( + "can not invoke __getitem__ on non-indexed source") + raise NotImplementedError + + def close(self) -> None: + if self._source: + self._source.close() + self._source = None + + def __del__(self) -> None: + self.close() + + def single_source(self, stream_idx: int) -> ScanSource: + from .scan_source_adapter import ScanSourceAdapter + return ScanSourceAdapter(self, stream_idx) diff --git a/python/src/ouster/sdk/client/multi_scan_source.py b/python/src/ouster/sdk/client/multi_scan_source.py new file mode 100644 index 00000000..76001a24 --- /dev/null +++ b/python/src/ouster/sdk/client/multi_scan_source.py @@ -0,0 +1,88 @@ +from typing import Iterator, List, Optional, Union +from typing_extensions import Protocol +from ._client import SensorInfo, LidarScan +from .data import FieldTypes +from .scan_source import ScanSource + + +class MultiScanSource(Protocol): + """Represents only data stream from more than one source.""" + + @property + def sensors_count(self) -> int: + """Number of individual scan streams that this scan source holds.""" + ... + + @property + def metadata(self) -> List[SensorInfo]: + """A list of Metadata objects associated with every scan streams.""" + ... + + @property + def is_live(self) -> bool: + """True if data obtained from the RUNNING sensor or as a stream from the socket + + Returns: + True if data obtained from the RUNNING sensor or as a stream from the socket + False if data is read from a stored media. Restarting an ``iter()`` means that + the data can be read again. + """ + ... + + @property + def is_seekable(self) -> bool: + """True for non-live sources, This property can be True regardless of scan source being indexed or not. + """ + ... + + @property + def is_indexed(self) -> bool: + """True for IndexedPcap and OSF scan sources, this property tells users whether the underlying source + allows for random access of scans, see __getitem__. + """ + ... + + # Pavlo/Oct19: Optional field, that is currently available only for OSFs and IndexedPcap, and + # None everywhere else. + # UN/Nov21: I don't understand why this only available in OSF and IndexedPcap source + @property + def fields(self) -> List[FieldTypes]: + """Field types are present in the LidarScan objects on read from iterator""" + ... + + @property + def scans_num(self) -> List[Optional[int]]: + """Number of scans available, in case of a live sensor or non-indexable scan source this method + returns a None for that stream""" + ... + + def __len__(self) -> int: + """returns the number of scans containe with the scan_source, in case scan_source holds more than + one stream then this would measure the number of collated scans across the streams + in the case of a live sensor or non-indexable scan source this method throws a TypeError + """ + ... + + def __iter__(self) -> Iterator[List[Optional[LidarScan]]]: + ... + + def _seek(self, key: int) -> None: + """seek/jump to a specific item within the list of LidarScan objects that this particular scan + source has access to""" + ... + + def __getitem__(self, key: Union[int, slice] + ) -> Union[List[Optional[LidarScan]], List[List[Optional[LidarScan]]]]: + """Indexed access and slices support""" + ... + + def close(self) -> None: + """Manually release any underlying resource.""" + ... + + def __del__(self) -> None: + """Automatic release of any underlying resource.""" + ... + + def single_source(self, stream_idx: int) -> ScanSource: + ... 
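For orientation while reviewing the new interface, here is a minimal usage sketch of the ``MultiScanSource`` protocol added above. It assumes a source instance was obtained elsewhere (for example from ``open_source``); the function name and printed fields are illustrative only and are not part of the change:

    from ouster.sdk.client import MultiScanSource

    def print_first_collated_frame(source: MultiScanSource) -> None:
        # Iterating a MultiScanSource yields collated frames: one
        # List[Optional[LidarScan]] per time slice, with one slot per
        # sensor (None when that sensor produced no scan in the slice).
        for scans in source:
            for idx, scan in enumerate(scans):
                if scan is not None:
                    print(f"sensor[{idx}]: {scan.w}x{scan.h} scan")
            break

        # Fall back to a single-sensor view of stream 0; iterating the
        # returned ScanSource yields individual LidarScan objects
        # instead of lists.
        single = source.single_source(0)
        print("stream 0 serial number:", single.metadata.sn)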
diff --git a/python/src/ouster/pcap/py.typed b/python/src/ouster/sdk/client/py.typed similarity index 100% rename from python/src/ouster/pcap/py.typed rename to python/src/ouster/sdk/client/py.typed diff --git a/python/src/ouster/sdk/client/scan_source.py b/python/src/ouster/sdk/client/scan_source.py new file mode 100644 index 00000000..ad345b34 --- /dev/null +++ b/python/src/ouster/sdk/client/scan_source.py @@ -0,0 +1,80 @@ +from typing import Iterator, List, Union, Optional +from typing_extensions import Protocol +from ._client import SensorInfo, LidarScan +from .data import FieldTypes + + +class ScanSource(Protocol): + """Represents only data stream from one stream.""" + + @property + def metadata(self) -> SensorInfo: + """A list of Metadata objects associated with the scan streams.""" + ... + + @property + def is_live(self) -> bool: + """True if data obtained from the RUNNING sensor or as a stream from the socket + + Returns: + True if data obtained from the RUNNING sensor or as a stream from the socket + False if data is read from a stored media. Restarting an ``iter()`` means that + the data can be read again. + """ + ... + + @property + def is_seekable(self) -> bool: + """True for non-live sources, This property can be True regardless of scan source being indexed or not. + """ + ... + + @property + def is_indexed(self) -> bool: + """True for IndexedPcap and OSF scan sources, this property tells users whether the underlying source + allows for random access of scans, see __getitem__. + """ + ... + + @property + def fields(self) -> FieldTypes: + """Field types are present in the LidarScan objects on read from iterator""" + ... + + @property + def scans_num(self) -> Optional[int]: + """Number of scans available, in case of a live sensor or non-indexable scan source this method + returns None""" + ... + + def __len__(self) -> int: + """Number of scans available, in case of a live sensor or non-indexable scan source this method + throws a TypeError""" + ... + + # NOTE: based on the underlying implemention the return type is + # Optional[LidarScan] since MultiScanSource returns collate scans by default. + # This can be solved by provide a method that gives access to uncollated scans + def __iter__(self) -> Iterator[Optional[LidarScan]]: + ... + + def _seek(self, key: int) -> None: + """seek/jump to a specific item within the list of LidarScan objects that this particular scan + source has access to""" + ... + + # NOTE: based on the underlying implemention the return type is + # Optional[LidarScan] since MultiScanSource returns collate scans by default. + # This can be solved by provide a method that gives access to uncollated scans + def __getitem__(self, key: Union[int, slice] + ) -> Union[Optional[LidarScan], List[Optional[LidarScan]]]: + """Indexed access and slices support""" + ... + + def close(self) -> None: + """Release the underlying resource, if any.""" + ... + + def __del__(self) -> None: + """Automatic release of any underlying resource.""" + ... 
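Similarly, a short sketch of how consuming code might treat the single-stream ``ScanSource`` protocol above, guarding random access behind ``is_indexed``; the function name is illustrative and the source is assumed to come from a recorded, non-live file:

    from ouster.sdk.client import ScanSource

    def summarize(source: ScanSource) -> None:
        # Plain iteration works for every ScanSource implementation,
        # live or recorded.
        for scan in source:
            if scan is not None:
                print(f"first scan: {scan.w}x{scan.h}")
            break

        # len() and __getitem__ are only meaningful for indexed sources;
        # live or unindexed sources raise instead (see docstrings above).
        if source.is_indexed:
            print("scan count:", len(source))
            first = source[0]      # integer index -> Optional[LidarScan]
            print("scan 0 present:", first is not None)
            window = source[0:2]   # slice -> List[Optional[LidarScan]]
            print("scans in slice [0:2]:", len(window))
        else:
            print("not indexed; only sequential iteration is available")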
diff --git a/python/src/ouster/sdk/client/scan_source_adapter.py b/python/src/ouster/sdk/client/scan_source_adapter.py new file mode 100644 index 00000000..f06c5124 --- /dev/null +++ b/python/src/ouster/sdk/client/scan_source_adapter.py @@ -0,0 +1,96 @@ +from typing import Iterator, List, Union, Optional +import typing +from .scan_source import ScanSource +from .multi_scan_source import MultiScanSource +from ._client import SensorInfo, LidarScan +from .data import FieldTypes + + +class ScanSourceAdapter(ScanSource): + """Represents only data stream from one stream.""" + + def __init__(self, scan_source: MultiScanSource, stream_idx: int = 0) -> None: + if stream_idx < 0 or stream_idx >= scan_source.sensors_count: + raise ValueError(f"stream_idx needs to be within the range [0, {scan_source.sensors_count})") + self._scan_source = scan_source + self._stream_idx = stream_idx + + @property + def metadata(self) -> SensorInfo: + """A list of Metadata objects associated with the scan streams.""" + return self._scan_source.metadata[self._stream_idx] + + @property + def is_live(self) -> bool: + """True if data obtained from the RUNNING sensor or as a stream from the socket + + Returns: + True if data obtained from the RUNNING sensor or as a stream from the socket + False if data is read from a stored media. Restarting an ``iter()`` means that + the data can be read again. + """ + return self._scan_source.is_live + + @property + def is_seekable(self) -> bool: + """True for non-live sources, This property can be True regardless of scan source being indexed or not. + """ + return self._scan_source.is_seekable + + @property + def is_indexed(self) -> bool: + """True for IndexedPcap and OSF scan sources, this property tells users whether the underlying source + allows for random access of scans, see __getitem__. + """ + return self._scan_source.is_indexed + + @property + def fields(self) -> FieldTypes: + """Field types are present in the LidarScan objects on read from iterator""" + return self._scan_source.fields[self._stream_idx] + + @property + def scans_num(self) -> Optional[int]: + """Number of scans available, in case of a live sensor or non-indexable scan source this method + returns None""" + return self._scan_source.scans_num[self._stream_idx] + + def __len__(self) -> int: + if self.scans_num is None: + raise TypeError("len is not supported on live or non-indexed sources") + return self.scans_num + + # NOTE: we need to consider a case without collation of scans + def __iter__(self) -> Iterator[Optional[LidarScan]]: + + def _stream_iter(source: MultiScanSource) -> Iterator[Optional[LidarScan]]: + for ls in source: + yield ls[self._stream_idx] + + return _stream_iter(self._scan_source) + + def _seek(self, key: int) -> None: + """seek/jump to a specific item within the list of LidarScan objects that this particular scan + source has access to""" + raise NotImplementedError + + def __getitem__(self, key: Union[int, slice] + ) -> Union[Optional[LidarScan], List[Optional[LidarScan]]]: + """Indexed access and slices support""" + if isinstance(key, int): + return self._scan_source[key][self._stream_idx] + elif isinstance(key, slice): + scans_list = self._scan_source[key] + scans_list = typing.cast(List[List[Optional[LidarScan]]], scans_list) + return [ls[self._stream_idx] for ls in scans_list] if scans_list else None + raise TypeError( + f"indices must be integer or slice, not {type(key).__name__}") + + # TODO: should this actually the parent scan source? 
any object why not + def close(self) -> None: + """Release the underlying resource, if any.""" + self._scan_source.close() + + def __del__(self) -> None: + """Automatic release of any underlying resource.""" + self.close() diff --git a/python/src/ouster/sdk/convert_to_legacy.py b/python/src/ouster/sdk/convert_to_legacy.py index e4e58bee..b9b6e79a 100644 --- a/python/src/ouster/sdk/convert_to_legacy.py +++ b/python/src/ouster/sdk/convert_to_legacy.py @@ -3,9 +3,11 @@ All rights reserved. """ +# TODO[UN]: move to ouster/cli/util + import argparse -from ouster import client +from ouster.sdk import client def main() -> None: diff --git a/python/src/ouster/sdk/examples/client.py b/python/src/ouster/sdk/examples/client.py index 9a6bcc80..2f3ee912 100644 --- a/python/src/ouster/sdk/examples/client.py +++ b/python/src/ouster/sdk/examples/client.py @@ -14,8 +14,8 @@ import numpy as np -from ouster import client -from ouster.client import LidarMode +from ouster.sdk import client +from ouster.sdk.client import LidarMode def configure_dual_returns(hostname: str) -> None: @@ -248,7 +248,7 @@ def record_pcap(hostname: str, n_seconds: max seconds of time to record. (Ctrl-Z correctly closes streams) """ - import ouster.pcap as pcap + import ouster.sdk.pcap as pcap from datetime import datetime # [doc-stag-pcap-record] diff --git a/python/src/ouster/sdk/examples/open3d.py b/python/src/ouster/sdk/examples/open3d.py index c67dfaa9..809bc1f5 100644 --- a/python/src/ouster/sdk/examples/open3d.py +++ b/python/src/ouster/sdk/examples/open3d.py @@ -16,8 +16,8 @@ "platforms. Try running `pip3 install open3d` first.") exit(1) -from ouster import client -from ouster.client import _utils +from ouster.sdk import client +from ouster.sdk.client import _utils from .colormaps import colorize Z_NEAR = 1.0 @@ -268,7 +268,7 @@ def update_data(vis: o3d.visualization.Visualizer): def main() -> None: import argparse import os - import ouster.pcap as pcap + import ouster.sdk.pcap as pcap descr = """Example visualizer using the open3d library. diff --git a/python/src/ouster/sdk/examples/osf.py b/python/src/ouster/sdk/examples/osf.py new file mode 100644 index 00000000..9e70a354 --- /dev/null +++ b/python/src/ouster/sdk/examples/osf.py @@ -0,0 +1,209 @@ +import argparse +import os +import numpy as np + + +def osf_read_scans(osf_file: str) -> None: + """Read Lidar Scans from an OSF file. + + Shows scans only for a single sensor, whatever happened to be the first stored in an OSF file. + """ + import ouster.sdk.osf as osf + # [doc-stag-osf-read-scans] + scans = osf.Scans(osf_file) + for scan in scans: + print(f'scan = {scan}, WxH={scan.w}x{scan.h}') + + # or with timestamps + for ts, scan in scans.withTs(): + print(f'ts = {ts}, scan = {scan}, WxH={scan.w}x{scan.h}') + # [doc-etag-osf-read-scans] + + +def osf_get_sensors_info(osf_file: str) -> None: + """Read Lidar Sensors info from an OSF file. + + Shows metadata for all sensors found in an OSF file. 
+ """ + import ouster.sdk.osf as osf + # [doc-stag-osf-get-sensors-info] + reader = osf.Reader(osf_file) + # Get all stored sensors information + sensors = reader.meta_store.find(osf.LidarSensor) + for sensor_id, sensor_meta in sensors.items(): + info = sensor_meta.info + print(f"sensor[{sensor_id}] = ", info) + # [doc-etag-osf-get-sensors-info] + + +def osf_read_all_messages(osf_file: str) -> None: + """Read all message from an OSF file.""" + import ouster.sdk.osf as osf + # [doc-stag-osf-read-all-messages] + reader = osf.Reader(osf_file) + # Reading all messages + for msg in reader.messages(): + print(f'ts = {msg.ts}, stream_im = {msg.id}') + if msg.of(osf.LidarScanStream): + scan = msg.decode() + print(f' got lidar scan = {scan.h}x{scan.w}') + # [doc-etag-osf-read-all-messages] + + +def osf_check_layout(osf_file: str) -> None: + """Checks chunks layout of an OSF file. + + Open file and checks for the presence of osf.StreamingInfo metadata entry, which signals the + STREAMING layout OSF. Also reads StreamingInfo and prints message counts and avg size of per + stream. + + NOTE: All OSFs produced from June 15, 2022 have the STREAMING layout which the + default and only layout available. + """ + import ouster.sdk.osf as osf + # [doc-stag-osf-check-layout] + reader = osf.Reader(osf_file) + # finds the first StreamingInfo metadata entry if any present + streaming_info = reader.meta_store.get(osf.StreamingInfo) + if streaming_info: + print("Stats available (STREAMING layout):") + for stream_id, stream_stat in streaming_info.stream_stats: + msg_cnt = stream_stat.message_count + msg_avg_size = stream_stat.message_avg_size + print(f" stream[{stream_id}]: msg_count = {msg_cnt},", + f"msg_avg_size = {msg_avg_size}") + else: + print("No stats available (STANDARD layout)") + # [doc-etag-osf-check-layout] + + +def osf_get_lidar_streams(osf_file: str) -> None: + """Reads info about available Lidar Scan streams in an OSF file. + + Find all LidarScanStream metadata entries and prints sensor_meta_id with encoded field types of + a LidarScan in a messages. + """ + import ouster.sdk.osf as osf + # [doc-stag-osf-get-lidar-streams] + reader = osf.Reader(osf_file) + lidar_streams = reader.meta_store.find(osf.LidarScanStream) + for stream_id, stream_meta in lidar_streams.items(): + sensor_id = stream_meta.sensor_meta_id + field_types = stream_meta.field_types + print(f"LidarScanStream[{stream_id}]:") + print(f" sensor_id = {sensor_id}") + print(f" field_types = {field_types}") + # [doc-etag-osf-get-lidar-streams] + + +def osf_slice_scans(osf_file: str) -> None: + """Copy scans from input OSF file with reduction using the Writer API. + + Slicing is done via saving only RANGE, SIGNAL and REFLECTIVITY fields into an output OSF files. + """ + from ouster.sdk import client + import ouster.sdk.osf as osf + # [doc-stag-osf-slice-scans] + # Scans reader from input OSF + scans = osf.Scans(osf_file) + + # New field types should be a subset of fields in encoded LidarScan so we just assume that + # RANGE, SIGNAL and REFLECTIVITY fields will be present in the input OSF file. + new_field_types = dict({ + client.ChanField.RANGE: np.dtype('uint32'), + client.ChanField.SIGNAL: np.dtype('uint16'), + client.ChanField.REFLECTIVITY: np.dtype('uint16') + }) + + output_file_base = os.path.splitext(os.path.basename(osf_file))[0] + output_file = output_file_base + '_sliced.osf' + + # Create Writer with a subset of fields to save (i.e. 
slicing will happen + # automatically on write) + writer = osf.Writer(output_file, scans.metadata, new_field_types) + + # Read scans and write back + for ts, scan in scans.withTs(): + print(f"writing sliced scan with ts = {ts}") + writer.save(0, scan, ts) + + writer.close() + # [doc-etag-osf-slice-scans] + + +def osf_split_scans(osf_file: str) -> None: + """Splits scans from input OSF into N=2 files. + + Spliting is done by timestamp. + """ + import ouster.sdk.osf as osf + # [doc-stag-osf-split-scans] + reader = osf.Reader(osf_file) + start_ts = reader.start_ts + end_ts = reader.end_ts + n_splits = 2 + split_dur = int((end_ts - start_ts) / n_splits) + + # Scans reader from input OSF + scans = osf.Scans(osf_file) + + output_file_base = os.path.splitext(os.path.basename(osf_file))[0] + + # Create N writers and create N output Lidar Streams to write too + writers = [] + for i in range(n_splits): + writers.append(osf.Writer(f"{output_file_base}_s{i:02d}.osf", scans.metadata)) + + # Read scans and write to a corresponding output stream + for ts, scan in scans.withTs(): + split_idx = int((ts - start_ts) / split_dur) + print(f"writing scan to split {split_idx:02d} file") + writers[split_idx].save(0, scan) + + # No need to call close, underlying writers will close automatically on destroy + # [doc-etag-osf-split-scans] + + +def main(): + """OSF examples runner.""" + examples = { + "read-scans": osf_read_scans, + "read-messages": osf_read_all_messages, + "split-scans": osf_split_scans, + "slice-scans": osf_slice_scans, + "get-lidar-streams": osf_get_lidar_streams, + "get-sensors-info": osf_get_sensors_info, + "check-layout": osf_check_layout + } + + description = "Ouster Python SDK OSF examples. The EXAMPLE must be one of:\n " + str.join( + '\n ', examples.keys()) + + parser = argparse.ArgumentParser( + description=description, formatter_class=argparse.RawTextHelpFormatter) + + parser.add_argument('osf_path', metavar='OSF', help='path to osf file') + parser.add_argument('example', + metavar='EXAMPLE', + choices=examples.keys(), + help='name of the example to run') + parser.add_argument('--scan-num', + type=int, + default=1, + help='index of scan to use') + args = parser.parse_args() + + try: + example = examples[args.example] + except KeyError: + print(f"No such example: {args.example}") + print(description) + exit(1) + + print(f'example: {args.example}') + + example(osf_file=args.osf_path) # type: ignore + + +if __name__ == "__main__": + main() diff --git a/python/src/ouster/sdk/examples/pcap.py b/python/src/ouster/sdk/examples/pcap.py index 59f49f98..67f00b35 100644 --- a/python/src/ouster/sdk/examples/pcap.py +++ b/python/src/ouster/sdk/examples/pcap.py @@ -13,7 +13,7 @@ from contextlib import closing import numpy as np -from ouster import client, pcap +from ouster.sdk import client, pcap from .colormaps import normalize diff --git a/python/src/ouster/sdk/examples/reference.py b/python/src/ouster/sdk/examples/reference.py index 6daeb3f2..4266ac72 100644 --- a/python/src/ouster/sdk/examples/reference.py +++ b/python/src/ouster/sdk/examples/reference.py @@ -11,7 +11,7 @@ import numpy as np -from ouster import client +from ouster.sdk import client # TODO: replace link when new FW 2.5/3.0 manual is up diff --git a/python/src/ouster/sdk/examples/viz.py b/python/src/ouster/sdk/examples/viz.py index aa677ff5..099f34ef 100644 --- a/python/src/ouster/sdk/examples/viz.py +++ b/python/src/ouster/sdk/examples/viz.py @@ -9,7 +9,7 @@ """ import argparse -from ouster import client, pcap, viz +from ouster.sdk 
import client, pcap, viz from ouster.sdk.util import resolve_metadata import os import sys diff --git a/python/src/ouster/sdk/io_type.py b/python/src/ouster/sdk/io_type.py new file mode 100644 index 00000000..b22c6ef2 --- /dev/null +++ b/python/src/ouster/sdk/io_type.py @@ -0,0 +1,60 @@ +import socket +import os +from enum import Enum, auto +from typing import Optional + + +class OusterIoType(Enum): + SENSOR = auto() + PCAP = auto() + OSF = auto() + BAG = auto() + CSV = auto() + PLY = auto() + PCD = auto() + LAS = auto() + + @staticmethod + def io_type_2_extension() -> dict: + return { + OusterIoType.PCAP: ".pcap", + OusterIoType.OSF: ".osf", + OusterIoType.BAG: ".bag", + OusterIoType.CSV: ".csv", + OusterIoType.PLY: ".ply", + OusterIoType.PCD: ".pcd", + OusterIoType.LAS: ".las" + } + + @staticmethod + def extension_2_io_type() -> dict: + return {value: key for key, value in OusterIoType.io_type_2_extension().items()} + + +def extension_from_io_type(source: OusterIoType) -> Optional[str]: + """Return a file extension for the given source type, if it's a file-based source.""" + return OusterIoType.io_type_2_extension().get(source) + + +def io_type_from_extension(source: str) -> OusterIoType: + """Return an OusterIoType given the file extension for the provided file path""" + ext = os.path.splitext(source)[1] + try: + return OusterIoType.extension_2_io_type()[ext.lower()] + except KeyError: + raise ValueError("Expecting", list( + OusterIoType.extension_2_io_type().keys())) + + +def io_type(source: str) -> OusterIoType: + """Return a OusterIoType given a source arg str""" + if os.path.isfile(source): + return io_type_from_extension(source) + try: + if socket.gethostbyname(source): + return OusterIoType.SENSOR + except Exception: + pass + + raise ValueError("Source type expected to be a sensor hostname, ip address," + " or a .pcap, .osf, or .bag file.") diff --git a/python/src/ouster/sdk/open_source.py b/python/src/ouster/sdk/open_source.py new file mode 100644 index 00000000..3b9cdfb1 --- /dev/null +++ b/python/src/ouster/sdk/open_source.py @@ -0,0 +1,137 @@ +from typing import List, Optional, Union +import numpy as np +from pathlib import Path +from ouster.sdk.client import ScanSource, MultiScanSource +import ouster.sdk.io_type +from ouster.sdk.io_type import OusterIoType +from ouster.sdk.osf import OsfScanSource +from ouster.sdk.pcap import PcapScanSource +from ouster.sdk.sensor import SensorScanSource + + +io_type_handlers = { + OusterIoType.SENSOR: SensorScanSource, + OusterIoType.PCAP: PcapScanSource, + OusterIoType.OSF: OsfScanSource +} + + +def open_source(source_url: str, sensor_idx: int = -1, *args, + extrinsics: Optional[Union[str, np.ndarray, List[np.ndarray]]] = None, + **kwargs) -> Union[ScanSource, MultiScanSource]: + """ + Parameters: + - source_url: could be a single url or many for the case of sensors. + the url can contain the path to a pcap file or an osf file. + alternatively, the url could contain a list of comma separated + sensor hostnames or ips (current multisensor is not supprted) + - sensor_idx: If sensor_idx is set to a postive number the function + returns a ScanSource instead of MultiScanSource which is simpler + but can can handle one source. sensor_idx shouldn't exceed the + number of scan sources that the source_url refers to. + - extrinsics: could be either a path to an extrinsics file, single 4x4 + numpy array or a list of 4x4 numpy arrays. 
In case a single 4x4 numpy + array was given while the scan_source had more than sensor then the + same extrinsics is copied and applied to all the sensors. If the list + of provided extrinsics exceeds the number of available sensors in + the scan source then the extra will be discarded. + Other Common Parameters + - cycle: loop when the stream ends, applies only to non-live sources. + - index: index the source before start if the format doesn't natively have + an index. doens't apply to live sources. + - flags: when this option is set, the FLAGS field will be added to the list + of fields of every scan. in case of dual returns profile FLAGS2 will also + be appended (default is True). + """ + source_urls = [url.strip() for url in source_url.split(',') if url.strip()] + + if len(source_urls) == 0: + raise ValueError("No valid source specified") + + if len(source_urls) > 1: + # This only applies to the live sensors case. + # TODO: revise once working on multi sensors + raise NotImplementedError( + "providing more than a single url is current not supported!") + + source_type: OusterIoType + scan_source: Optional[MultiScanSource] = None + try: + source_type = ouster.sdk.io_type.io_type(source_urls[0]) + handler = io_type_handlers[source_type] + scan_source = handler(source_urls[0], *args, **kwargs) + except KeyError: + raise NotImplementedError( + f"The io_type:{source_type} is not supported!") + except Exception as ex: + raise RuntimeError(f"Failed to create scan_source for url {source_urls}\n" + f" more details: {ex}") + + if scan_source is None: + raise RuntimeError( + f"Failed to create scan_source for url {source_urls}") + + _populate_extrinsics(scan_source, source_urls[0], source_type, extrinsics) + + if sensor_idx < 0: + return scan_source + + if sensor_idx < scan_source.sensors_count: + # return the simplifed single stream interface + return scan_source.single_source(sensor_idx) + + raise ValueError(f"source idx = {sensor_idx} value exceeds the number " + f"of available sensors = {scan_source.sensors_count} " + f"from the source {source_url}") + + +def _populate_extrinsics(scan_source: MultiScanSource, + source_url: str, + source_type: OusterIoType, + _extrinsics: Optional[Union[str, np.ndarray, List[np.ndarray]]] = None) -> None: + + from ouster.sdk.util import resolve_extrinsics, _parse_extrinsics_file + + extrinsics: Optional[List[np.ndarray]] = None + + if _extrinsics is not None: + # handle single numpy array case + if isinstance(_extrinsics, np.ndarray) and _extrinsics.shape == (4, 4): + extrinsics = [_extrinsics] * scan_source.sensors_count + # handle list of numpy array case + elif isinstance(_extrinsics, list) and all( + [isinstance(ext, np.ndarray) and ext.shape == (4, 4) + for ext in _extrinsics]): + extrinsics = _extrinsics + # handle extrinsics as a file path + elif isinstance(_extrinsics, str): + sensors_serial = [info.sn for info in scan_source.metadata] + extrinsics_from_file = _parse_extrinsics_file( + _extrinsics, sensors_serial) + if extrinsics_from_file: + extrinsics = [ext_f[0] + for ext_f in extrinsics_from_file if ext_f] + else: + print(f"warning: failed to load extrinsics from provided path: " + f"{_extrinsics}") + else: + raise ValueError( + f"Error whiles parsing supplied extrinsics {_extrinsics}") + else: + if source_type in [OusterIoType.PCAP, OusterIoType.OSF]: + source_dir = Path(source_url).absolute().parent + # print(f"examining the directory '{source_dir}' for any extrinsics") + extrinsics_from_file = resolve_extrinsics(data_path=source_dir, + 
infos=scan_source.metadata) + if extrinsics_from_file: + extrinsics = [ext_f[0] + for ext_f in extrinsics_from_file if ext_f] + + if extrinsics: + if len(extrinsics) < scan_source.sensors_count: + # TODO: should we handle the case when sensor_idx >= 0 + print("warning: loaded externsics doesn't match to the count of" + f"sensors. provided {len(extrinsics)}, expected: {scan_source.sensors_count}") + for i in range(scan_source.sensors_count): + if extrinsics[i] is not None: + scan_source.metadata[i].extrinsic = extrinsics[i] diff --git a/python/src/ouster/sdk/osf/__init__.py b/python/src/ouster/sdk/osf/__init__.py new file mode 100644 index 00000000..463883c7 --- /dev/null +++ b/python/src/ouster/sdk/osf/__init__.py @@ -0,0 +1,30 @@ +""" +Copyright (c) 2021, Ouster, Inc. +All rights reserved. + +API to work with OSF files +""" +# flake8: noqa (unused imports) + +from ._osf import Reader +from ._osf import MessageRef +from ._osf import ChunkRef +from ._osf import MetadataStore +from ._osf import MetadataEntry +from ._osf import LidarSensor +from ._osf import Extrinsics # TODO: extrinsics should be factored out of osf +from ._osf import LidarScanStreamMeta +from ._osf import LidarScanStream +from ._osf import StreamStats +from ._osf import StreamingInfo +from ._osf import ChunksLayout +from ._osf import Writer + +from ._osf import slice_and_cast +from ._osf import dump_metadata +from ._osf import backup_osf_file_metablob +from ._osf import restore_osf_file_metablob +from ._osf import osf_file_modify_metadata + +from .data import Scans +from .osf_scan_source import OsfScanSource diff --git a/python/src/ouster/osf/_osf.pyi b/python/src/ouster/sdk/osf/_osf.pyi similarity index 72% rename from python/src/ouster/osf/_osf.pyi rename to python/src/ouster/sdk/osf/_osf.pyi index 316d540e..4929c235 100644 --- a/python/src/ouster/osf/_osf.pyi +++ b/python/src/ouster/sdk/osf/_osf.pyi @@ -5,7 +5,7 @@ from typing import Any, ClassVar, List from typing import (overload, Iterator) import numpy -from ouster.client import BufferT, LidarScan +from ouster.sdk.client import BufferT, LidarScan, SensorInfo class ChunkRef: @@ -48,18 +48,21 @@ class LidarScanStreamMeta: class LidarScanStream: type_id: ClassVar[str] = ... # read-only - def __init__(self, writer, sensor_meta_id: int, field_types = ...) -> None: ... - def save(self, ts: int, ls) -> None: ... class LidarSensor(MetadataEntry): type_id: ClassVar[str] = ... # read-only - def __init__(self, arg0: str) -> None: ... + + @overload + def __init__(self, arg0: SensorInfo) -> None: ... + @overload + def __init__(self, metadata_json: str) -> None: ... @property def info(self) -> Any: ... @property def metadata(self) -> str: ... + class Extrinsics(MetadataEntry): type_id: ClassVar[str] = ... # read-only def __init__(self, extrinsics: numpy.ndarray, ref_meta_id: int = ..., name: str = ...) -> None: ... @@ -112,7 +115,6 @@ class MetadataStore: class Reader: def __init__(self, arg0: str) -> None: ... def chunks(self) -> Iterator: ... - def messages_standard(self) -> Iterator: ... @overload def messages(self) -> Iterator: ... @overload @@ -124,13 +126,16 @@ class Reader: @property def end_ts(self) -> int: ... @property - def id(self) -> str: ... + def metadata_id(self) -> str: ... @property def meta_store(self) -> Any: ... @property def start_ts(self) -> int: ... @property def has_stream_info(self) -> bool: ... + @property + def has_message_idx(self) -> bool: ... + def ts_by_message_idx(self, stream_id: int, msg_idx: int) -> int: ... 
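# Editor's note: illustrative sketch (not part of this stub) of the index
# accessors added to Reader above. "sample.osf" and the stream id 1 are
# placeholders; ts_by_message_idx is only meaningful when has_message_idx
# reports True (i.e. the file carries a message index).
import ouster.sdk.osf as osf

reader = osf.Reader("sample.osf")
if reader.has_message_idx:
    # timestamp (ns) of the first message of stream 1, looked up via the index
    print(reader.ts_by_message_idx(1, 0))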
class StreamStats: @@ -158,20 +163,38 @@ class StreamingInfo(MetadataEntry): class Writer: @overload - def __init__(self, file_name: str) -> None: ... - + def __init__(self, file_name: str, chunk_size: int = ...) -> None: ... @overload - def __init__(self, file_name: str, metadata_id: str, - chunk_size: int = ...) -> None: ... - - def addMetadata(self, arg0: object) -> int: ... - def saveMessage(self, stream_id: int, ts: int, buffer: BufferT) -> int: ... - def close(self) -> None: ... - @property + def __init__(self, filename: str, info: SensorInfo, + field_types = ..., chunk_size: int = ...) -> None: ... + @overload + def __init__(self, filename: str, info: List[SensorInfo], + field_types = ..., chunk_size: int = ...) -> None: ... + @overload + def save(self, stream_id: int, scan: LidarScan) -> None: ... + @overload + def save(self, stream_id: int, scan: LidarScan, ts: int) -> None: ... + @overload + def save(self, stream_id: int, scan: List[LidarScan]) -> None: ... + def add_sensor(self, info: SensorInfo, field_types = ...) -> int: ... + def add_metadata(self, arg0: object) -> int: ... + def save_message(self, stream_id: int, ts: int, buffer: BufferT) -> int: ... + @overload + def sensor_info(self) -> List[SensorInfo]: ... + @overload + def sensor_info(self, stream_id: int) -> SensorInfo: ... + def sensor_info_count(self) -> int: ... def filename(self) -> str: ... + def metadata_id(self) -> str: ... + def set_metadata_id(self, id: str) -> None: ... @property def meta_store(self) -> MetadataStore: ... + def close(self) -> None: ... + def is_closed(self) -> bool: ... + def __enter__(self) -> Writer: ... + def __exit__(*args) -> None: ... + def slice_and_cast(lidar_scan: LidarScan, field_types = ...) -> LidarScan: ... def init_logger(log_level: str, @@ -184,6 +207,6 @@ def init_logger(log_level: str, def dump_metadata(file: str, full: bool = ...) -> str: ... def parse_and_print(file: str, with_decoding: bool = ...) -> None: ... - -def pcap_to_osf(file: str, meta: str, lidar_port: int, osf_filename: str, - chunks_layout: str = ..., chunk_size: int = ...) -> bool: ... +def backup_osf_file_metablob(file: str, backup_file_name: str) -> None: ... +def restore_osf_file_metablob(file: str, backup_file_name: str) -> None: ... +def osf_file_modify_metadata(file: str, new_metadata: List[SensorInfo]) -> int: ... diff --git a/python/src/ouster/sdk/osf/data.py b/python/src/ouster/sdk/osf/data.py new file mode 100644 index 00000000..0008e9e2 --- /dev/null +++ b/python/src/ouster/sdk/osf/data.py @@ -0,0 +1,127 @@ +from typing import List, Optional, Union, cast, Iterator, Tuple + +from ouster.sdk import client +from ouster.sdk.client.data import FieldTypes +from ouster.sdk.client import ScanSource +from ouster.sdk.client._client import LidarScan +import ouster.sdk.osf as osf + + +class Scans(ScanSource): + """An iterable stream of ``LidarScan`` read from OSF file (for the first available sensor).""" + + def __init__(self, + osf_file: str, + *, + cycle: bool = False, + sensor_id: int = 0): + """ + Args: + osf_file: OSF filename as scans source + cycle: repeat infinitely after iteration is finished is True + sensor_id: id of the sensor which LidarScan stream data to read + (i.e. id of the metadata entry with ``osf.LidarSensor`` type). + 0 (default) means that first LidarSensor from the OSF is used. 
+ """ + self._reader = osf.Reader(osf_file) + self._cycle = cycle + self._sensor_id = sensor_id + + if self._sensor_id: + # sensor_id is passed so we can get the sensor metadata + # entry directly by metadata entry id + sensor_meta = self._reader.meta_store[self._sensor_id] + if sensor_meta and sensor_meta.of(osf.LidarSensor): + self._sensor = sensor_meta + else: + raise ValueError(f"Error: Sensor is not found by sensor_id: " + f" {self._sensor_id}") + else: + # sensor_id is not provided, so we get the first + # osf.LidarSensor metadata entry and use its stream + sensor_meta = self._reader.meta_store.get(osf.LidarSensor) + if not sensor_meta: + raise ValueError("Error: No sensors found in OSF file") + self._sensor = sensor_meta + + # check for Extrinsics + extrinsics = self._reader.meta_store.find(osf.Extrinsics) + for _, v in extrinsics.items(): + if v.ref_meta_id == self._sensor.id: + print(f"Found extrinsics for sensor[{self._sensor.id}]:\n", + v.extrinsics) + self._sensor.info.extrinsic = v.extrinsics + + # Find the corresponding stream_id for the sensor + scan_streams = self._reader.meta_store.find(osf.LidarScanStream) + self._sensor_stream_id = next((mid for mid, m in scan_streams.items() + if m.sensor_meta_id == self._sensor.id), + 0) + if not self._sensor_stream_id: + raise ValueError(f"Error: No LidarScan stream found for sensor" + f" id:{self._sensor.id} in an OSF file") + + def __iter__(self) -> Iterator[client.LidarScan]: + """Iterator that returns ``LidarScan`` objects.""" + for _, ls in self.withTs(): + yield ls + + def withTs(self) -> Iterator[Tuple[int, client.LidarScan]]: + """Iterator that returns tuple of (``ts``, ``LidarScan``) + + Where ``ts`` - is a timestamp (ns) of a ``LidarScan`` (usually as a + timestamp of a first packet in a ``LidarScan``) + """ + while True: + # TODO[pb]: Read only specified _sensor_stream_id stream + for msg in self._reader.messages([self._sensor_stream_id], + self._reader.start_ts, + self._reader.end_ts): + if msg.id == self._sensor_stream_id: + scan = msg.decode() + if scan: + yield msg.ts, cast(client.LidarScan, scan) + if not self._cycle: + break + + def close(self) -> None: + # TODO[pb]: Do the close for Reader? + pass + + @property + def metadata(self) -> client.SensorInfo: + """Return metadata of a Lidar Sensor used.""" + return self._sensor.info + + @property + def is_live(self) -> bool: + return False + + @property + def is_seekable(self) -> bool: + return False + + @property + def is_indexed(self) -> bool: + return False # TODO: for now we just use False no matter what + + @property + def fields(self) -> FieldTypes: + return client.get_field_types(self.metadata) + + @property + def scans_num(self) -> Optional[int]: + raise NotImplementedError # TODO: implement + + def __len__(self) -> int: + raise NotImplementedError # TODO: implement + + def _seek(self, key: int) -> None: + pass + + def __getitem__(self, key: Union[int, slice] + ) -> Union[Optional[LidarScan], List[Optional[LidarScan]]]: + raise NotImplementedError + + def __del__(self) -> None: + pass diff --git a/python/src/ouster/sdk/osf/multi.py b/python/src/ouster/sdk/osf/multi.py new file mode 100644 index 00000000..7b79480d --- /dev/null +++ b/python/src/ouster/sdk/osf/multi.py @@ -0,0 +1,2 @@ +raise RuntimeError("The ouster.osf.multi has been renamed to ouster.osf.osf_scan_source under" +"the ouster.osf package. 
THIS FILE WILL BE REMOVED IN NEXT RELEASE") diff --git a/python/src/ouster/sdk/osf/osf_scan_source.py b/python/src/ouster/sdk/osf/osf_scan_source.py new file mode 100644 index 00000000..dbb03022 --- /dev/null +++ b/python/src/ouster/sdk/osf/osf_scan_source.py @@ -0,0 +1,294 @@ +from typing import cast, Iterator, Dict, Optional, List, Tuple, Union + +from more_itertools import ilen +from ouster.sdk import client +from ouster.sdk.client import LidarScan, SensorInfo, first_valid_packet_ts +from ouster.sdk.client import ScanSource, MultiScanSource + +from ouster.sdk.osf._osf import (Reader, Writer, MessageRef, LidarSensor, + Extrinsics, LidarScanStream, StreamingInfo) + +from ouster.sdk.client.multi import collate_scans # type: ignore +from ouster.sdk.util import ForwardSlicer, progressbar # type: ignore + + +class OsfScanSource(MultiScanSource): + """Implements MultiScanSource protocol using OSF Reader with multiple sensors.""" + + def __init__( + self, + file_path: str, + *, + dt: int = 10**8, + complete: bool = False, + index: bool = False, + cycle: bool = False, + flags: bool = True, + **kwargs + ) -> None: + """ + Args: + file_path: OSF file path to open as a scan source + dt: max time difference between scans in the collated scan (i.e. + time period at which every new collated scan is released/cut), + default is 0.1s + complete: set to True to only release complete scans (not implemnted) + index: if this flag is set to true an index will be built for the osf + file enabling len, index and slice operations on the scan source, if + the flag is set to False indexing is skipped (default is False). + cycle: repeat infinitely after iteration is finished (default is False) + flags: when this option is set, the FLAGS field will be added to the list + of fields of every scan, in case of dual returns FLAGS2 will also be + appended (default is True). + + Remarks: + In case the OSF file didn't have builtin-index and the index flag was + was set to True the object will attempt to index the file in place. + """ + + self._complete = complete + self._indexed = index + + if 'meta' in kwargs and kwargs['meta']: + raise TypeError( + f"{OsfScanSource.__name__} does not support user-supplied metadata.") + + self._reader = Reader(file_path) + + if not self._reader.has_message_idx: + if index: + print("OSF file not indexed! 
re-indexing file inplace...") + try: + self._reindex_osf_inplace(self._reader, file_path) + except RuntimeError as e: + print(f"Failed re-indexing OSF file!\n more details: {e}") + self._indexed = False + self._reader = Reader(file_path) + else: + print("OSF file not indexed, indexing not requested!") + + self._cycle = cycle + self._dt = dt + + self._sensors = [(sid, sm) for sid, sm in self._reader.meta_store.find( + LidarSensor).items()] + + # map stream_id to metadata entry + self._sensor_idx: Dict[int, int] + self._sensor_idx = { + sid: sidx + for sidx, (sid, _) in enumerate(self._sensors) + } + + # load stored extrinsics (if any) + extrinsics = self._reader.meta_store.find(Extrinsics) + for _, v in extrinsics.items(): + if v.ref_meta_id in self._sensor_idx: + sidx = self._sensor_idx[v.ref_meta_id] + print(f"OSF: stored extrinsics for sensor[{sidx}]:\n", + v.extrinsics) + self._sensors[sidx][1].info.extrinsic = v.extrinsics + + self._metadatas = [sm.info for _, sm in self._sensors] + + # map stream_id to metadata entry + self._stream_sensor_idx: Dict[int, int] + self._stream_sensor_idx = {} + for stream_type in [LidarScanStream]: + for stream_id, stream_meta in self._reader.meta_store.find( + stream_type).items(): + self._stream_sensor_idx[stream_id] = self._sensor_idx[ + stream_meta.sensor_meta_id] + + def append_flags(ftypes: Dict, flags: bool) -> Dict: + import numpy as np + if flags: + ftypes.update({client.ChanField.FLAGS: np.uint8}) + if client.ChanField.RANGE2 in ftypes: + ftypes.update({client.ChanField.FLAGS2: np.uint8}) + return ftypes + + scan_streams = self._reader.meta_store.find(LidarScanStream) + self._stream_ids = [mid for mid, _ in scan_streams.items()] + self._fields = [append_flags(lss.field_types, flags) + for _, lss in scan_streams.items()] + # TODO: the following two properties (_scans_num, _len) are computed on + # load but should rather be provided directly through OSF API. Obtain + # these values directly from OSF API once implemented. 
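        # Editor's note (descriptive comment): when an index is available, the
        # block below iterates the stored messages once up front to pre-compute
        # the per-stream scan counts (_scans_num) and the total number of
        # collated scans (_len) referred to in the TODO above.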
+ if self._indexed: + start_ts = self._reader.start_ts + end_ts = self._reader.end_ts + self._scans_num = [ilen(self._msgs_iter_stream( + mid, start_ts, end_ts)) for mid in self._stream_ids] + self._len = ilen(collate_scans(self._msgs_iter( + self._stream_ids, start_ts, end_ts, False), + self.sensors_count, lambda msg: cast(MessageRef, msg).ts, dt=self._dt)) + + def _osf_convert(self, reader: Reader, output: str) -> None: + # TODO: figure out how to get the current chunk_size + chunk_size = 0 + writer = Writer(output, chunk_size) + writer.set_metadata_id(reader.metadata_id) + for _, m in reader.meta_store.items(): + if m.of(StreamingInfo): + # StreamingInfo is always generated by Writer automatically in + # default STREAMING chunks layout, so we don't copy the original + continue + writer.add_metadata(m) + # convert + if not reader.has_stream_info: + writer.close() + raise Exception("Standard Message Layout No Longer Supported") + msgs = reader.messages() + msgs_count = ilen(msgs) + msgs = reader.messages() + for idx, msg in enumerate(msgs): + writer.save_message(msg.id, msg.ts, msg.buffer) + progressbar(idx, msgs_count, "", "indexed") + print("\nfinished building index") + writer.close() + + def _reindex_osf_inplace(self, reader, osf_file): + import tempfile + with tempfile.NamedTemporaryFile(delete=True) as f: + self._osf_convert(reader, f.name) + try: + import shutil + shutil.copy2(f.name, osf_file) + except OSError as e: + raise RuntimeError(f"Error overwriteing osf file: {osf_file}" + f"\nmore details: {e}") + + def _msgs_iter_stream(self, stream_id: int, start_ts: int, stop_ts: int + ) -> Iterator[MessageRef]: + for _, msg in self._msgs_iter([stream_id], start_ts, stop_ts, False): + yield msg + + def _msgs_iter(self, stream_ids: List[int], start_ts: int, stop_ts: int, cycle: bool + ) -> Iterator[Tuple[int, MessageRef]]: + while True: + had_message = False + for msg in self._reader.messages(stream_ids, start_ts, stop_ts): + if msg.of(LidarScanStream): + sidx = self._stream_sensor_idx[msg.id] + had_message = True + yield sidx, msg + # exit if we had no messages to prevent an infinite loop + if not cycle or not had_message: + break + + def _scans_iter(self, start_ts: int, stop_ts: int, cycle: bool + ) -> Iterator[Tuple[int, LidarScan]]: + for idx, msg in self._msgs_iter(self._stream_ids, start_ts, stop_ts, cycle): + ls = msg.decode() + if ls: + window = self.metadata[idx].format.column_window + scan = cast(LidarScan, ls) + if not self._complete or scan.complete(window): + if set(scan.fields) != set(self._fields[idx].keys()): + scan = client.LidarScan(scan, self._fields[idx]) + yield idx, scan + + @property + def sensors_count(self) -> int: + return len(self._stream_ids) + + @property + def metadata(self) -> List[SensorInfo]: + return self._metadatas + + @property + def is_live(self) -> bool: + return False + + @property + def is_seekable(self) -> bool: + return True + + @property + def is_indexed(self) -> bool: + return self._indexed + + @property + def fields(self) -> List[client.FieldTypes]: + """Field types are present in the LidarScan objects on read from iterator""" + return self._fields + + @property + def scans_num(self) -> List[Optional[int]]: + return self._scans_num # type: ignore + + def __len__(self) -> int: + if not self.is_indexed: + raise TypeError("len is not supported on non-indexed source") + return self._len + + def __iter__(self) -> Iterator[List[Optional[LidarScan]]]: + msgs_itr = self._scans_iter( + self._reader.start_ts, self._reader.end_ts, self._cycle) + return 
collate_scans(msgs_itr, self.sensors_count, first_valid_packet_ts, dt=self._dt) + + def _seek(self, key: int) -> None: + """seek/jump to a specific item within the list of LidarScan objects that this particular scan + source has access to""" + ... + + def __getitem__(self, key: Union[int, slice] + ) -> Union[List[Optional[LidarScan]], List[List[Optional[LidarScan]]]]: + + if not self.is_indexed: + raise TypeError( + "can not invoke __getitem__ on non-indexed source") + + scans_itr: Iterator[Tuple[int, LidarScan]] + + if isinstance(key, int): + L = len(self) + if key < 0: + key += L + if key < 0 or key >= L: + raise IndexError("index is out of range") + ts = [self._reader.ts_by_message_idx( + mid, key) for mid in self._stream_ids] + ts_start = min(ts) + ts_stop = min(ts_start + self._dt, max(ts)) + scans_itr = self._scans_iter(ts_start, ts_stop, False) + return next(collate_scans(scans_itr, self.sensors_count, + first_valid_packet_ts, dt=self._dt)) + + if isinstance(key, slice): + L = len(self) + k = ForwardSlicer.normalize(key, L) + count = k.stop - k.start + if count <= 0: + return [] + ts_start = min([self._reader.ts_by_message_idx(mid, k.start) + for mid in self._stream_ids]) + ts_stop = max([self._reader.ts_by_message_idx(mid, k.stop - 1) + for mid in self._stream_ids]) + scans_itr = collate_scans(self._scans_iter(ts_start, ts_stop, False), + self.sensors_count, first_valid_packet_ts, + dt=self._dt) + result = [scan for idx, scan in ForwardSlicer.slice( + enumerate(scans_itr), k) if idx < count] + return result if k.step > 0 else list(reversed(result)) + + raise TypeError( + f"indices must be integer or slice, not {type(key).__name__}") + + def close(self) -> None: + """Close osf file.""" + # TODO[pb]: Need to add Reader.close() method, because now it's + # all happens in dtor, which is not very clear by lifecycle. + if self._reader: + del self._reader + self._reader = None # type: ignore + + def __del__(self) -> None: + """Automatic release of any underlying resource.""" + # self.close() # TODO: currently this causes an exception, avoid + pass + + def single_source(self, stream_idx: int) -> ScanSource: + from ouster.sdk.client.scan_source_adapter import ScanSourceAdapter + return ScanSourceAdapter(self, stream_idx) diff --git a/python/src/ouster/sdkx/py.typed b/python/src/ouster/sdk/osf/py.typed similarity index 100% rename from python/src/ouster/sdkx/py.typed rename to python/src/ouster/sdk/osf/py.typed diff --git a/python/src/ouster/sdk/pcap/__init__.py b/python/src/ouster/sdk/pcap/__init__.py new file mode 100644 index 00000000..fa85b212 --- /dev/null +++ b/python/src/ouster/sdk/pcap/__init__.py @@ -0,0 +1,16 @@ +""" +Copyright (c) 2021, Ouster, Inc. +All rights reserved. + +Pcap tools to record/read/write Ouster sensor data.""" +# flake8: noqa: F401 (unused imports) + +from .pcap import Pcap +from .pcap import record +from .pcap import _guess_ports +from .pcap import _packet_info_stream +from .pcap import _replay +from .pcap_multi_packet_reader import PcapMultiPacketReader +from .pcap_scan_source import PcapScanSource +from .packet_iter import RecordingPacketSource +from .packet_iter import BagRecordingPacketSource diff --git a/python/src/ouster/pcap/_pcap.pyi b/python/src/ouster/sdk/pcap/_pcap.pyi similarity index 75% rename from python/src/ouster/pcap/_pcap.pyi rename to python/src/ouster/sdk/pcap/_pcap.pyi index 9ee0602e..7e66cb82 100644 --- a/python/src/ouster/pcap/_pcap.pyi +++ b/python/src/ouster/sdk/pcap/_pcap.pyi @@ -5,9 +5,9 @@ All rights reserved. 
Type annotations for pcap python bindings. """ -from typing import (overload, List, Callable) +from typing import (Dict, overload, List, Callable) -from ..client.data import BufferT +from ouster.sdk.client.data import BufferT class playback_handle: @@ -90,6 +90,44 @@ class packet_info: def network_protocol(self) -> int: ... +class PcapIndex: + + def __init__(self, int) -> None: + ... + + @property + def frame_id_indices(self) -> List[Dict[int, int]]: + ... + + def frame_count(self, int) -> int: + ... + +class IndexedPcapReader: + + def __init__(self, filename: str, metadata_filename: List[str]) -> None: + ... + + def build_index(self) -> None: + ... + + def next_packet(self) -> int: + ... + + def current_info(self) -> packet_info: + ... + + def current_data(self) -> BufferT: + ... + + def get_index(self) -> PcapIndex: + ... + + def seek(self, int) -> None: + ... + + def reset(self) -> None: + ... + def replay_initialize(file_name: str) -> playback_handle: ... diff --git a/python/src/ouster/sdk/pcap/packet_iter.py b/python/src/ouster/sdk/pcap/packet_iter.py new file mode 100644 index 00000000..8f40b8e7 --- /dev/null +++ b/python/src/ouster/sdk/pcap/packet_iter.py @@ -0,0 +1,225 @@ +import os +import time +from typing import (Callable, Iterable, Iterator, TypeVar, + Optional, Any) + +from more_itertools import consume + +from ouster.sdk.client import (Packet, PacketMultiSource, LidarPacket, ImuPacket, FrameBorder) +from ouster.sdk.pcap.pcap import MTU_SIZE +import ouster.sdk.pcap._pcap as _pcap + + +T = TypeVar('T') + + +def ichunked_before(it: Iterable[T], + pred: Callable[[T], bool]) -> Iterator[Iterator[T]]: + """Return the given stream chunked by the predicate. + + Each sub-iterator will be fully consumed when the next chunk is + requested. No caching of unused items is performed, so client code should + evaluate sub-iterators (e.g. into lists) to avoid dropping items. + + This should behave same as more_itertools.split_before, except that chunks + aren't eagerly evaluated into lists. This makes it safe to use on streams + where it's possible that ``pred`` never evaluates to true. + """ + i = iter(it) + + # flag used by chunks to signal that the underlying iterator is exhausted + done = False + + # first item of the next chunk. See: nonlocal below + try: + t = next(i) + except StopIteration: + return + + def chunk() -> Iterator[T]: + nonlocal done, t + + yield t + for t in i: + if pred(t): + break + else: + yield t + # only if the iterator is exhausted + else: + done = True + + while not done: + c = chunk() + yield c + consume(c) + + +def ichunked_framed( + packets: Iterable[Packet], + pred: Callable[[Packet], + bool] = lambda _: True) -> Iterator[Iterator[Packet]]: + """Delimit a packets when the frame id changes and pred is true.""" + + return ichunked_before(packets, FrameBorder(pred)) + + +def n_frames(packets: Iterable[Packet], n: int) -> Iterator[Packet]: + for i, frame in enumerate(ichunked_framed(packets)): + if i < n: + yield from frame + else: + break + + +# TODO: these currently account for SensorScanSource being based on Scans internally and will +# require rework once that has a proper ScansMulti implementation -- Tim T. 
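# Editor's note: minimal usage sketch of the chunking helpers above, assuming
# "data.pcap" with matching "data.json" metadata exist and that pcap.Pcap is
# constructed as (pcap_path, SensorInfo).
def _example_first_two_frames() -> None:
    from ouster.sdk import client, pcap
    with open("data.json") as f:
        info = client.SensorInfo(f.read())
    source = pcap.Pcap("data.pcap", info)
    # n_frames() cuts the packet stream off after the requested number of
    # frames, using ichunked_framed() to detect frame boundaries
    for packet in n_frames(source, 2):
        pass  # consume only the packets belonging to the first two frames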
+class RecordingPacketSource: + # TODO: deduplicate this & pcap.record + def __init__(self, + source: PacketMultiSource, + prefix_path: str, + *, + sensor_idx: int = 0, + n_seconds: float = 0.0, + n_frames: Optional[int], + chunk_size: int = 0, + src_ip: str = "127.0.0.1", + dst_ip: str = "127.0.0.1", + lidar_port: int = 7502, + imu_port: int = 7503, + use_sll_encapsulation: bool = False, + overwrite: bool = True): + self.source = source + self.sensor_idx = sensor_idx + self.prefix_path = prefix_path + self.n_seconds = n_seconds + self.n_frames = n_frames + self.chunk_size = chunk_size + self.src_ip = src_ip + self.dst_ip = dst_ip + self.lidar_port = lidar_port + self.imu_port = imu_port + self.use_sll_encapsulation = use_sll_encapsulation + self.overwrite = overwrite + + @property # type: ignore + def __class__(self): + # report the class of the wrapped packet source + return self.source.__class__ + + def __iter__(self): + has_timestamp = None + error = False + n = 0 + + metadata = self.source.metadata + if type(metadata) is list: + metadata = metadata[self.sensor_idx] + + frame_bound = FrameBorder() + + chunk = 0 + pcap_path = self.prefix_path + f"-{chunk:03}.pcap" + print(f"Saving PCAP file at {pcap_path}") + if os.path.isfile(pcap_path) and not self.overwrite: + raise FileExistsError(f"File '{pcap_path}' already exists") + + try: + start_time = time.time() + num_frames = 0 + handle = _pcap.record_initialize(pcap_path, MTU_SIZE, + self.use_sll_encapsulation) + for next_packet in self.source: + idx, packet = next_packet if (type(next_packet) is tuple) else (None, next_packet) + if (idx is None) or (idx == self.sensor_idx): + if isinstance(packet, LidarPacket): + src_port = self.lidar_port + dst_port = self.lidar_port + elif isinstance(packet, ImuPacket): + src_port = self.imu_port + dst_port = self.imu_port + else: + raise ValueError("Unexpected packet type") + + if has_timestamp is None: + has_timestamp = (packet.capture_timestamp is not None) + elif has_timestamp != (packet.capture_timestamp is not None): + raise ValueError("Mixing timestamped/untimestamped packets") + + ts = packet.capture_timestamp or time.time() + _pcap.record_packet(handle, self.src_ip, self.dst_ip, src_port, dst_port, packet._data, ts) + + if frame_bound(packet): + num_frames += 1 + if self.chunk_size and os.path.getsize(pcap_path) > self.chunk_size * 2**20: + # file size exceeds chunk size; create a new chunk + chunk += 1 + pcap_path = self.prefix_path + f"-{chunk:03}.pcap" + print(f"Saving PCAP file at {pcap_path}") + _pcap.record_uninitialize(handle) + if os.path.isfile(pcap_path) and not self.overwrite: + raise FileExistsError(f"File '{pcap_path}' already exists") + handle = _pcap.record_initialize(pcap_path, MTU_SIZE, + self.use_sll_encapsulation) + if (self.n_frames and num_frames > self.n_frames) or \ + (self.n_seconds and time.time() - start_time > self.n_seconds): + break + n += 1 + yield next_packet + except Exception: + error = True + raise + finally: + _pcap.record_uninitialize(handle) + if error and os.path.exists(pcap_path) and n == 0: + os.remove(pcap_path) + + def __getattr__(self, attr): + # forward all other calls to self.source + return self.source.__getattribute__(attr) + + +# TODO: these currently account for SensorScanSource being based on Scans internally and will +# require rework once that has a proper ScansMulti implementation -- Tim T. 
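# Editor's note: illustrative sketch of the recording wrapper above; `packets`
# stands in for an existing packet source (e.g. a live sensor client) and the
# "./recording" prefix is a placeholder.
def _example_record_while_reading(packets) -> None:
    recorder = RecordingPacketSource(packets, "./recording",
                                     n_frames=100, chunk_size=256)
    # packets pass through unchanged while also being written to
    # ./recording-000.pcap, ./recording-001.pcap, ... in 256 MiB chunks
    for packet in recorder:
        pass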
+class BagRecordingPacketSource: + def __init__(self, packet_source: PacketMultiSource, + filename: str, sensor_idx: int = 0, + lidar_topic: str = "/os_node/lidar_packets", + imu_topic: str = "/os_node/imu_packets"): + self.packet_source = packet_source + self.filename = filename + self.sensor_idx = sensor_idx + self.lidar_topic = lidar_topic + self.imu_topic = imu_topic + + @property # type: ignore + def __class__(self): + # report the class of the wrapped packet source + return self.packet_source.__class__ + + def __iter__(self): + from ouster.cli.core.util import import_rosbag_modules + import_rosbag_modules(raise_on_fail=True) + + from ouster.sdk.bag import PacketMsg # type: ignore + import rosbag # type: ignore + import rospy # type: ignore + try: + with rosbag.Bag(self.filename, 'w') as outbag: + for next_packet in self.packet_source: + idx, packet = next_packet if (type(next_packet) is tuple) else (None, next_packet) + if (idx is None) or (idx == self.sensor_idx): + ts = rospy.Time.from_sec(packet.capture_timestamp) + msg = PacketMsg(buf=packet._data.tobytes()) + if isinstance(packet, LidarPacket): + outbag.write(self.lidar_topic, msg, ts) + elif isinstance(packet, ImuPacket): + outbag.write(self.imu_topic, msg, ts) + yield next_packet + except (KeyboardInterrupt, StopIteration): + pass + + def __getattr__(self, attr: str) -> Any: + # forward all other calls to self.source + return self.packet_source.__getattribute__(attr) diff --git a/python/src/ouster/pcap/pcap.py b/python/src/ouster/sdk/pcap/pcap.py similarity index 96% rename from python/src/ouster/pcap/pcap.py rename to python/src/ouster/sdk/pcap/pcap.py index fb790f9d..0c1851af 100644 --- a/python/src/ouster/pcap/pcap.py +++ b/python/src/ouster/sdk/pcap/pcap.py @@ -10,7 +10,7 @@ from collections import defaultdict from typing import (Iterable, Iterator, Optional, Tuple, Dict) # noqa: F401 -from ouster.client import (LidarPacketValidator, LidarPacket, ImuPacket, Packet, PacketSource, # noqa: F401 +from ouster.sdk.client import (LidarPacketValidator, LidarPacket, ImuPacket, Packet, PacketSource, # noqa: F401 SensorInfo, _client, PacketValidationFailure, PacketIdError) # noqa: F401 from . import _pcap @@ -51,7 +51,7 @@ def __init__(self, lidar_port: Optional[int] = None, imu_port: Optional[int] = None, loop: bool = False, - _soft_id_check: bool = False): + soft_id_check: bool = False): """Read a single sensor data stream from a packet capture. Packet captures can contain arbitrary network traffic or even multiple @@ -78,7 +78,7 @@ def __init__(self, lidar_port: Specify the destination port of lidar packets imu_port: Specify the destination port of imu packets loop: Specify whether to reload the PCAP file when the end is reached - _soft_id_check: if True, don't skip lidar packets buffers on init_id/sn mismatch + soft_id_check: if True, don't skip lidar packets buffers on init_id/sn mismatch """ # prefer explicitly specified ports (can probably remove the args?) 
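# Editor's note: brief sketch for the renamed soft_id_check parameter in the
# hunk above; the file names are placeholders and pcap.Pcap is assumed to be
# constructed as (pcap_path, SensorInfo).
from ouster.sdk import client, pcap

with open("data.json") as f:
    meta = client.SensorInfo(f.read())
# with soft_id_check=True, lidar packets are kept even when their init_id/sn
# does not match the supplied metadata (the mismatch count is still tracked)
source = pcap.Pcap("data.pcap", meta, soft_id_check=True)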
@@ -91,7 +91,7 @@ def __init__(self, self._metadata.udp_port_imu = imu_port self.loop = loop - self._soft_id_check = _soft_id_check + self._soft_id_check = soft_id_check self._id_error_count = 0 # TWS 20230615 TODO generialize error counting and reporting self._errors = defaultdict(int) # type: Dict[PacketValidationFailure,int] @@ -169,6 +169,10 @@ def __iter__(self) -> Iterator[Packet]: # of bad packet size or init_id/sn errors pass + @property + def is_live(self) -> bool: + return False + @property def metadata(self) -> SensorInfo: return self._metadata diff --git a/python/src/ouster/sdk/pcap/pcap_multi_packet_reader.py b/python/src/ouster/sdk/pcap/pcap_multi_packet_reader.py new file mode 100644 index 00000000..d109ae70 --- /dev/null +++ b/python/src/ouster/sdk/pcap/pcap_multi_packet_reader.py @@ -0,0 +1,185 @@ +from typing import Dict, Iterator, List, Optional, Tuple +from ouster.sdk.client import PacketMultiSource +import ouster.sdk.pcap._pcap as _pcap +from ouster.sdk.pcap._pcap import PcapIndex # type: ignore +from ouster.sdk.pcap.pcap import _guess_ports, _packet_info_stream +from functools import partial + +import time + +from threading import Lock + +from ouster.sdk.client import SensorInfo, PacketIdError +from ouster.sdk.client.data import Packet, LidarPacket, ImuPacket + + +class PcapMultiPacketReader(PacketMultiSource): + """Read a sensors packet streams out of a pcap file as an iterator.""" + + _metadata: List[SensorInfo] + _metadata_json: List[str] + _rate: float + _handle: Optional[_pcap.playback_handle] + _lock: Lock + + def __init__(self, + pcap_path: str, + metadata_paths: List[str], + *, + rate: float = 0.0, + index: bool = False, + soft_id_check: bool = False): + """Read a single sensor data stream from a single packet capture file. + + Args: + metadata_paths: List of sensors metadata files + pcap_path: File path of recorded pcap + rate: Output packets in real time, if non-zero + index: Should index the source, may take extra time on startup + soft_id_check: if True, don't skip lidar packets buffers on + init_id mismatch + """ + self._metadata = [] + self._metadata_json = [] + self._indexed = index + self._soft_id_check = soft_id_check + self._id_error_count = 0 + + self._port_info: Dict[int, object] = dict() + + # sample pcap and attempt to find UDP ports consistent with metadatas + # NOTE[pb]: Needed for port guessing logic for old single sensor data. + n_packets = 1000 + stats = _packet_info_stream(pcap_path, n_packets) + + for meta_path in metadata_paths: + with open(meta_path) as meta_file: + meta_json = meta_file.read() + meta_info = SensorInfo(meta_json) + self._metadata_json.append(meta_json) + self._metadata.append(meta_info) + idx = len(self._metadata) - 1 + + # NOTE: Rudimentary logic of port guessing that is still needed + # for old single sensor data when `udp_port_lidar` and + # `udp_port_imu` fields are not set in sensor metadata. + # In some distant future we may need to remove it. 
+ guesses = _guess_ports(stats, self._metadata[idx]) + if len(guesses) > 0: + lidar_guess, imu_guess = guesses[0] + meta_info.udp_port_lidar = meta_info.udp_port_lidar or lidar_guess + meta_info.udp_port_imu = meta_info.udp_port_imu or imu_guess + + port_to_packet = [ + (meta_info.udp_port_lidar, + partial(LidarPacket, + _raise_on_id_check=not soft_id_check)), + (meta_info.udp_port_imu, ImuPacket) + ] + for packet_port, packet_ctor in port_to_packet: + if packet_port in self._port_info: + raise RuntimeError( + f"Port collision: {packet_port}" + f" was already used for another stream") + self._port_info[packet_port] = dict(ctor=packet_ctor, + idx=idx) + + self._rate = rate + self._reader: Optional[_pcap.IndexedPcapReader] = \ + _pcap.IndexedPcapReader(pcap_path, self._metadata) # type: ignore + if self._indexed: + self._reader.build_index() + self._lock = Lock() + + def __iter__(self) -> Iterator[Tuple[int, Packet]]: + with self._lock: + if self._reader is None: + raise ValueError("I/O operation on closed packet source") + + buf = bytearray(2**16) + packet_info = _pcap.packet_info() + + real_start_ts = time.monotonic() + pcap_start_ts = None + while True: + with self._lock: + if not (self._reader and + self._reader.next_packet()): + break + packet_info = self._reader.current_info() + if packet_info.dst_port not in self._port_info: + # not lidar or imu packet that we are interested in + continue + packet_data = self._reader.current_data() + n = len(packet_data) + buf = packet_data.tobytes() # type: ignore + + # if rate is set, read in 'real time' simulating UDP stream + # TODO: factor out into separate packet iterator utility + timestamp = packet_info.timestamp + if self._rate: + if not pcap_start_ts: + pcap_start_ts = timestamp + real_delta = time.monotonic() - real_start_ts + pcap_delta = (timestamp - pcap_start_ts) / self._rate + delta = max(0, pcap_delta - real_delta) + time.sleep(delta) + + try: + port_info = self._port_info[packet_info.dst_port] + idx = port_info["idx"] # type: ignore + packet = port_info["ctor"](buf[0:n], self._metadata[idx], timestamp) # type: ignore + if isinstance(packet, LidarPacket) and packet.id_error: + self._id_error_count += 1 + yield (idx, packet) + except PacketIdError: + self._id_error_count += 1 + except ValueError: + # bad packet size here: this can happen when + # packets are buffered by the OS, not necessarily an error + # same pass as in core.py + # TODO: introduce status for PacketSource to indicate frequency + # of bad packet size or init_id/sn errors + pass + + @property + def metadata(self) -> List[SensorInfo]: + """Metadata associated with the packet.""" + return self._metadata + + @property + def is_live(self) -> bool: + return False + + @property + def is_seekable(self) -> bool: + return self._indexed + + @property + def is_indexed(self) -> bool: + return self._indexed + + @property + def _index(self) -> Optional[PcapIndex]: + with self._lock: + return self._reader.get_index() if self._reader else None + + def seek(self, offset: int) -> None: + if self._reader: + self._reader.seek(offset) + + # diagnostics + @property + def id_error_count(self) -> int: + return self._id_error_count + + def restart(self) -> None: + """Restart playback, only relevant to non-live sources""" + with self._lock: + if self._reader: + self._reader.reset() + + def close(self) -> None: + """Release Pcap resources. 
Thread-safe.""" + with self._lock: + self._reader = None diff --git a/python/src/ouster/sdk/pcap/pcap_scan_source.py b/python/src/ouster/sdk/pcap/pcap_scan_source.py new file mode 100644 index 00000000..f25fba54 --- /dev/null +++ b/python/src/ouster/sdk/pcap/pcap_scan_source.py @@ -0,0 +1,146 @@ +from typing import List, Optional, Tuple, Union + +from ouster.sdk.client import LidarScan, first_valid_packet_ts +from ouster.sdk.client import ScansMulti # type: ignore +from ouster.sdk.client.multi import collate_scans # type: ignore +from ouster.sdk.util import (resolve_field_types, resolve_metadata_multi, + ForwardSlicer, progressbar) # type: ignore +from .pcap_multi_packet_reader import PcapMultiPacketReader + + +class PcapScanSource(ScansMulti): + """Implements MultiScanSource protocol for pcap files with multiple sensors.""" + + def __init__( + self, + file_path: str, + *, + dt: int = 10**8, + complete: bool = False, + index: bool = False, + cycle: bool = False, + flags: bool = True, + raw_headers: bool = False, + raw_fields: bool = False, + soft_id_check: bool = False, + meta: Tuple[str, ...] = (), + **_ + ) -> None: + """ + Args: + file_path: OSF filename as scans source + dt: max time difference between scans in the collated scan (i.e. + time period at which every new collated scan is released/cut), + default is 0.1s. + complete: set to True to only release complete scans + index: if this flag is set to true an index will be built for the pcap + file enabling len, index and slice operations on the scan source, if + the flag is set to False indexing is skipped (default is False). + cycle: repeat infinitely after iteration is finished (default is False) + flags: when this option is set, the FLAGS field will be added to the list + of fields of every scan, in case of dual returns FLAGS2 will also be + appended (default is True). + """ + + self._source: Optional[PcapMultiPacketReader] + self._source = None # initialize the attribute so close works correctly if we fail out + + try: + metadata_paths = list(meta) + if not meta: + metadata_paths = resolve_metadata_multi(file_path) + + if not metadata_paths: + raise RuntimeError( + "Metadata jsons not found. 
Make sure that metadata json files " + "have common prefix with a PCAP file") + + # TODO: need a better way to save these + self._metadata_paths = metadata_paths + print(f"loading metadata from {metadata_paths}") + + self._source = PcapMultiPacketReader(file_path, + metadata_paths=metadata_paths, + index=index, + soft_id_check=soft_id_check) + except Exception: + self._source = None + raise + + # generate the field types per sensor with flags/raw_fields if specified + field_types = resolve_field_types(self._source.metadata, + flags=flags, + raw_headers=raw_headers, + raw_fields=raw_fields) + + super().__init__(self._source, dt=dt, complete=complete, + cycle=cycle, fields=field_types) + + # TODO[IMPORTANT]: there is a bug with collate scans in which it always + # skips the first frame + def collate_scans_itr(scans_itr): + return collate_scans(scans_itr, self.sensors_count, + first_valid_packet_ts, dt=self._dt) + + if index: + self._frame_offset = [] + pi = self._source._index # type: ignore + scans_itr = collate_scans_itr(self._scans_iter(True, False, False)) + # scans count in first source + scans_count = len(pi.frame_id_indices[0]) # type: ignore + for scan_idx, scans in enumerate(scans_itr): + offsets = [pi.frame_id_indices[idx].get( # type: ignore + scan.frame_id) for idx, scan in enumerate(scans) if scan] + self._frame_offset.append(min([v for v in offsets if v])) + progressbar(scan_idx, scans_count, "", "indexed") + print("\nfinished building index") + + @property + def scans_num(self) -> List[Optional[int]]: + if not self.is_indexed: + return [None] * self.sensors_count + pi = self._source._index # type: ignore + return [pi.frame_count(i) for i in range(self.sensors_count)] # type: ignore + + def __len__(self) -> int: + if not self.is_indexed: + raise TypeError("len is not supported on non-indexed source") + return len(self._frame_offset) + + def __getitem__(self, key: Union[int, slice] + ) -> Union[List[Optional[LidarScan]], List[List[Optional[LidarScan]]]]: + + if not self.is_indexed: + raise TypeError( + "can not invoke __getitem__ on non-indexed source") + + if isinstance(key, int): + L = len(self) + if key < 0: + key += L + if key < 0 or key >= L: + raise IndexError("index is out of range") + offset = self._frame_offset[key] + self._source.seek(offset) # type: ignore + scans_itr = self._scans_iter(False, False, True) + return next(collate_scans(scans_itr, self.sensors_count, + first_valid_packet_ts, dt=self._dt)) + + if isinstance(key, slice): + L = len(self) + k = ForwardSlicer.normalize(key, L) + count = k.stop - k.start + if count <= 0: + return [] + offset = self._frame_offset[k.start] + self._source.seek(offset) # type: ignore + scans_itr = collate_scans(self._scans_iter(False, False, True), + self.sensors_count, + first_valid_packet_ts, + dt=self._dt) + result = [scan for idx, scan in ForwardSlicer.slice( + enumerate(scans_itr), k) if idx < count] + return result if k.step > 0 else list(reversed(result)) + + raise TypeError( + f"indices must be integer or slice, not {type(key).__name__}") diff --git a/python/src/ouster/sdk/pcap/py.typed b/python/src/ouster/sdk/pcap/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/python/src/ouster/sdk/sensor/__init__.py b/python/src/ouster/sdk/sensor/__init__.py new file mode 100644 index 00000000..d2cd52bd --- /dev/null +++ b/python/src/ouster/sdk/sensor/__init__.py @@ -0,0 +1,10 @@ +""" +Copyright (c) 2024, Ouster, Inc. +All rights reserved. + +Sensor specific interfaces. 
+""" +# flake8: noqa: F401 (unused imports) + +from .sensor_multi_packet_reader import SensorMultiPacketReader # type: ignore +from .sensor_scan_source import SensorScanSource \ No newline at end of file diff --git a/python/src/ouster/sdk/sensor/py.typed b/python/src/ouster/sdk/sensor/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/python/src/ouster/sdk/sensor/sensor_multi_packet_reader.py b/python/src/ouster/sdk/sensor/sensor_multi_packet_reader.py new file mode 100644 index 00000000..18053fa3 --- /dev/null +++ b/python/src/ouster/sdk/sensor/sensor_multi_packet_reader.py @@ -0,0 +1,247 @@ +# type: ignore +from typing import (Iterator, List, Optional, Tuple, Callable) + +import os +import logging +from math import ceil + +from threading import Thread + +import ouster.sdk.client as client +from ouster.sdk.client import PacketMultiSource +import ouster.sdk.client._client as _client +from ouster.sdk.client import SensorInfo, PacketIdError +from ouster.sdk.client.data import Packet, LidarPacket, ImuPacket + + +logger = logging.getLogger("multi-logger") + +MULTI_DEBUG = 0 +try: + MULTI_DEBUG = int(os.getenv("OUSTER_SDK_MULTI_DEBUG", 0)) + if MULTI_DEBUG: + logger.setLevel(logging.DEBUG) +except Exception: + pass + + +class SensorMultiPacketReader(PacketMultiSource): + """Multi sensor packet source""" + + def __init__(self, + hostnames: List[str], + ports: List[Tuple[int, int]], + *, + buf_size_secs: float = 2.0, + timeout: Optional[float] = 2.0, + _overflow_err: bool = False, + _flush_before_read: bool = True, + _flush_frames: int = 5, + _skip_metadata_beam_validation: bool = False) -> None: + """ + Neither the ports nor udp destination configuration on the sensors will + be updated. The metadata will be fetched over the network from the + sensors. + + Args: + hostnames: list of hostnames or IP addresss of the sensors + ports: list of tuples of UDP ports to listen on for lidar/imu data + buf_size_secs: seconds of the data to buffer before OVERFLOW + timeout: seconds to wait for packets before signaling error or None + _overflow_err: if True, raise ClientOverflow + _flush_before_read: if True, try to clear buffers before reading + _skip_metadata_beam_validation: if True, skip metadata beam angle check + + Raises: + ClientError: If initializing the client fails. 
+ """ + assert len(hostnames) == len(ports) + self._hostnames = hostnames + self._connections = [ + _client.SensorConnection(h, lp, ip) + for h, (lp, ip) in zip(self._hostnames, ports) + ] + self._timeout = timeout + self._overflow_err = _overflow_err + self._flush_before_read = _flush_before_read + self._fetched_meta: List[str] = [] + self._flush_frames = _flush_frames + self._skip_metadata_beam_validation = _skip_metadata_beam_validation + + # Fetch metadatas from the sensors (always) + self._fetch_metadata() + self._metadata = [ + SensorInfo(m_json, self._skip_metadata_beam_validation) + for m_json in self._fetched_meta + ] + + self._id_error_count = [0] * len(self.metadata) + + # set names + for m, hn in zip(self._metadata, self._hostnames): + m.hostname = hn + + self._pf = [_client.PacketFormat.from_info(m) for m in self._metadata] + self._cli = _client.UDPPacketSource() + for conn, info in zip(self._connections, self._metadata): + self._cli.add_client(conn, info, buf_size_secs) + + self._producer = Thread(target=self._cli.produce) + self._producer.start() + + def _fetch_metadata(self, timeout: Optional[float] = None) -> None: + timeout_sec = 45 + if timeout: + timeout_sec = ceil(timeout) + if not self._fetched_meta: + self._fetched_meta = [c.get_metadata( + legacy=False, timeout_sec=timeout_sec) for c in self._connections] + if not all(self._fetched_meta): + raise client.ClientError("Failed to collect metadata. UPS :(") + + def _next_packet(self) -> Optional[Tuple[int, Packet]]: + e = self._cli.pop(self._timeout) + try: + if e.state & _client.ClientState.OVERFLOW: + if self._overflow_err: + raise client.ClientOverflow( + f"Overflow on source id: {e.source}") + # TODO[pb]: This is a strange case, not sure what we need to do here ... + raise ValueError( + f"Overflow on sensor [{e.source}] was detected " + f"but ClientOverflow can't be raised so we are " + f"raising ValueError, hmmm ...") + if e.state & _client.ClientState.LIDAR_DATA: + p = self._cli.packet(e) + packet = LidarPacket( + p._data, self._metadata[e.source], p.capture_timestamp) + return (e.source, packet) + elif e.state & _client.ClientState.IMU_DATA: + p = self._cli.packet(e) + packet = ImuPacket( + p._data, self._metadata[e.source], p.capture_timestamp) + return (e.source, packet) + elif e.state == _client.ClientState.TIMEOUT: + raise client.ClientTimeout( + f"No packets received within {self._timeout}s") + elif e.state & _client.ClientState.ERROR: + raise client.ClientError("Client returned ERROR state") + elif e.state & _client.ClientState.EXIT: + return None + except PacketIdError as err: + self._id_error_count[e.source] += 1 + raise err + finally: + # LidarPacket/ImuPacket ctors may raise but we always want to + # advance the subscriber so to not overflow + self._cli.advance(e) + + raise AssertionError("Should be unreachable, UUPS") + + def __iter__(self) -> Iterator[Tuple[int, Packet]]: + """Starts the multi packet source iterator.""" + + if not self._producer.is_alive(): + raise ValueError("I/O operation on closed packet source") + + if self._flush_before_read: + flush = self._flush_impl(self._flush_frames) + else: + # autopep8: off + flush = lambda _: False + # autopep8: on + + while True: + try: + p = self._next_packet() + if p is not None: + if not flush(p): + yield p + else: + break + except (ValueError, PacketIdError): + # bad packet size here: this can happen when + # packets are buffered by the OS, not necessarily an error + # same pass as in data.py + pass + + def _flush_impl( + self, + n_frames: int = 3) -> 
Callable[[Tuple[int, client.Packet]], bool]: + """Makes a flush function for an iterator to drop n_frames per sensor. + + Args: + n_frames: number of frames to drop (used for every sensor) + + Returns: + The predicate function that indicate the need to flush the current packet. + """ + + frames_cnt = [n_frames] * len(self.metadata) + sensor_flushed = [False] * len(self.metadata) + + frame_bound = [client.FrameBorder() for _ in self.metadata] + + def flush_impl(p: Tuple[int, client.Packet]) -> bool: + nonlocal frame_bound + idx, packet = p[0], p[1] + if sensor_flushed[idx]: + return False + if not frame_bound[idx](packet): + return True + if frames_cnt[idx] > 0: + frames_cnt[idx] -= 1 + return True + sensor_flushed[idx] = True + return False + + return flush_impl + + @property + def metadata(self) -> List[SensorInfo]: + """Metadata associated with the packet streams.""" + return self._metadata + + @property + def is_live(self) -> bool: + return True + + @property + def is_seekable(self) -> bool: + return False + + @property + def is_indexed(self) -> bool: + return False + + def restart(self) -> None: + # NOTE[self]: currently we ignore the call for a live sensor but one + # could interpret this invocation as a sensor "reinit" command + pass + + def close(self) -> None: + """Shut down producer thread and close network connections. + + Attributes may be unset if constructor throws an exception. + """ + if hasattr(self, '_cli'): + self._cli.shutdown() + if hasattr(self, '_producer'): + self._producer.join() + if hasattr(self, '_connections'): + for conn in self._connections: + conn.shutdown() + + def __del__(self) -> None: + self.close() + + # these methods are for diagnostics + @property + def buf_use(self) -> int: + """Size of the buffers that is actially used""" + return self._cli.size + + @property + def id_error_count(self) -> List[int]: + """Number of PacketIdError accumulated per connection/sensor""" + return self._id_error_count diff --git a/python/src/ouster/sdk/sensor/sensor_scan_source.py b/python/src/ouster/sdk/sensor/sensor_scan_source.py new file mode 100644 index 00000000..c7c874b5 --- /dev/null +++ b/python/src/ouster/sdk/sensor/sensor_scan_source.py @@ -0,0 +1,125 @@ +from typing import List, Optional, Union + +import numpy as np +import ouster.sdk.client as client +from ouster.sdk.client import ScansMulti # type: ignore +from ouster.sdk.util import default_scan_fields # type: ignore +from .util import configure_sensor +# TODO: from .sensor_multi_packet_reader import SensorMultiPacketReader + + +class SensorScanSource(ScansMulti): + """Implements MultiScanSource protocol for live sensors, multiple sensors isn't supported yet.""" + + def __init__( + self, + hostnames: Union[str, List[str]], + *, + lidar_port: int = 7502, + imu_port: int = 7503, + complete: bool = False, + soft_id_check: bool = False, + do_not_reinitialize: bool = False, + no_auto_udp_dest: bool = False, + buf_size: int = 128, + timeout: float = 1.0, + extrinsics: Optional[List[float]] = None, + flags: bool = True, + **kwargs + ) -> None: + """ + Args: + hostnames: sensor hostname urls or IPs. + complete: set to True to only release complete scans. + flags: when this option is set, the FLAGS field will be added to the list + of fields of every scan, in case of dual returns FLAGS2 will also be + appended (default is True). 
+ """ + + self._source = None + + if isinstance(hostnames, str): + hostnames = [hostnames] + elif len(hostnames) > 1: + raise NotImplementedError("multi sensor is not implemented") + + if 'meta' in kwargs and kwargs['meta']: + raise TypeError( + f"{SensorScanSource.__name__} does not support user-supplied metadata.") + + config = configure_sensor(hostnames[0], + lidar_port, + imu_port, + do_not_reinitialize=do_not_reinitialize, + no_auto_udp_dest=no_auto_udp_dest) + + print(f"Initializing connection to sensor {hostnames[0]} on " + f"lidar port {config.udp_port_lidar} with udp dest '{config.udp_dest}'...") + + # make 0 timeout in the cli mean no timeout + timeout_ = timeout if timeout > 0 else None + + lidar_port = config.udp_port_lidar if config.udp_port_lidar else 7502 + imu_port = config.udp_port_imu if config.udp_port_imu else 7503 + + self._source = client.Sensor(hostnames[0], + lidar_port, + imu_port, + buf_size=buf_size, + timeout=timeout_, + soft_id_check=soft_id_check) + + # enable parsing flags field + # TODO: try to switch to using the resolve_field_types + self._fields = default_scan_fields(self._source.metadata.format.udp_profile_lidar, + flags=flags) + + self._scans = client.Scans(self._source, + timeout=timeout_, + complete=complete, + fields=self._fields, + _max_latency=2) + + if extrinsics: + self._scans.metadata.extrinsic = np.array( + extrinsics).reshape((4, 4)) + print( + f"Using sensor extrinsics:\n{self._scans.metadata.extrinsic}") + + # NOTE: the following properties have been adapted to the multi sensor case + # using the single client.Scans inteface. + @property + def sensors_count(self) -> int: + return 1 + + @property + def metadata(self) -> List[client.SensorInfo]: + return [self._source.metadata] # type: ignore + + @property + def is_live(self) -> bool: + return True + + @property + def is_seekable(self) -> bool: + return False + + @property + def is_indexed(self) -> bool: + return False + + @property + def fields(self) -> List[client.FieldTypes]: + return [self._fields] + + def __iter__(self): + + def encompass(it): + for x in it: + yield [x] + + return encompass(self._scans) + + def close(self): + if self._source: + self._source.close() diff --git a/python/src/ouster/sdk/sensor/util.py b/python/src/ouster/sdk/sensor/util.py new file mode 100644 index 00000000..c3ab91d9 --- /dev/null +++ b/python/src/ouster/sdk/sensor/util.py @@ -0,0 +1,157 @@ +from typing import List, Optional +from copy import copy +import requests +import ouster.sdk.client as client +from packaging import version +from ouster.sdk.util import firmware_version + +MIN_AUTO_DEST_FW = version.Version("2.3.1") + + +def _auto_detected_udp_dest(hostname: str) -> Optional[str]: + """ + Function which obtains the udp_dest the sensor would choose when automatically detecting + without changing anything else about sensor state + + Args: + hostname: sensor hostname + Returns: + udp_dest: the udp_dest the sensor detects automatically + """ + orig_config = client.get_config(hostname, active=True) + + # get what the possible auto udp_dest is + config_endpoint = f"http://{hostname}/api/v1/sensor/config" + response = requests.post(config_endpoint, params={'reinit': False, 'persist': False}, + json={'udp_dest': '@auto'}) + response.raise_for_status() + + # get staged config + udp_auto_config = client.get_config(hostname, active=False) + + # set staged config back to original + response = requests.post(config_endpoint, params={'reinit': False, 'persist': False}, + json={'udp_dest': str(orig_config.udp_dest)}) + 
response.raise_for_status() + + return udp_auto_config.udp_dest + + +def configure_sensor(hostname: str, + lidar_port: Optional[int] = None, + imu_port: Optional[int] = None, + do_not_reinitialize: bool = False, + no_auto_udp_dest: bool = False) -> client.SensorConfig: + """Depending on the args do_not_reinitialize, no_auto_udp_dest, + possibly reconfigure the sensor. Then, return the configuration that is used.""" + + print(f"Contacting sensor {hostname}...") + + fw_version = firmware_version(hostname) + + auto_config_udp_dest = None + use_set_config_auto = False + + # original config + orig_config = client.get_config(hostname, active=True) + + if fw_version >= MIN_AUTO_DEST_FW: + auto_config_udp_dest = _auto_detected_udp_dest(hostname) + if orig_config.udp_dest != auto_config_udp_dest: + if no_auto_udp_dest or do_not_reinitialize: + print(f"WARNING: Your sensor's udp destination {orig_config.udp_dest} does " + f"not match the detected udp destination {auto_config_udp_dest}. " + f"If you get a Timeout error, drop -x and -y from your " + f"arguments to allow automatic udp_dest setting.") + else: + if no_auto_udp_dest or do_not_reinitialize: + print("WARNING: You have opted not to allow us to reset your auto UDP dest " + "by using either -x or -y. If you get a Timeout error, drop -x and -y " + "from your arguments to allow automatic udp_dest setting.") + else: + use_set_config_auto = True + + if do_not_reinitialize: + + if orig_config.operating_mode == client.OperatingMode.OPERATING_STANDBY: + raise RuntimeError("Your sensor is in STANDBY mode but you have disallowed " + "reinitialization. Drop -x to allow reinitialization or " + "change your sensor's operating mode.") + + if lidar_port and orig_config.udp_port_lidar != lidar_port: + raise RuntimeError( + f"Sensor's lidar port {orig_config.udp_port_lidar} does " + f"not match provided lidar port but you have disallowed " + f"reinitialization. 
Drop -x to allow reinitialization or " + f"change your specified lidar_port {lidar_port}") + return orig_config + + new_config = copy(orig_config) + + lidar_port_change = (lidar_port and + orig_config.udp_port_lidar != lidar_port) + imu_port_change = (imu_port and orig_config.udp_port_imu != imu_port) + port_changes = [] + if (lidar_port_change or imu_port_change): + new_config.udp_port_lidar = lidar_port or orig_config.udp_port_lidar + new_config.udp_port_imu = imu_port or orig_config.udp_port_imu + + if lidar_port_change: + port_changes.append(f"lidar port from {orig_config.udp_port_lidar} " + f"to {new_config.udp_port_lidar}") + + if imu_port_change: + port_changes.append(f"imu port from {orig_config.udp_port_imu} " + f"to {new_config.udp_port_imu}") + + port_changes_str = " and ".join(port_changes) + print(f"Will change {port_changes_str} ...") + + if not no_auto_udp_dest and auto_config_udp_dest and orig_config.udp_dest != auto_config_udp_dest: + print((f"Will change udp_dest from '{orig_config.udp_dest}' to automatically " + f"detected '{auto_config_udp_dest}'...")) + new_config.udp_dest = auto_config_udp_dest + + if use_set_config_auto: + print(f"Will change udp_dest from '{orig_config.udp_dest}' to automatically " + "detected UDP DEST") + new_config.udp_dest = None + + new_config.operating_mode = client.OperatingMode.OPERATING_NORMAL + if new_config.operating_mode != orig_config.operating_mode: + print((f"Will change sensor's operating mode from {orig_config.operating_mode}" + f" to {new_config.operating_mode}")) + + if orig_config != new_config or use_set_config_auto: + print("Setting sensor config...") + client.set_config(hostname, new_config, persist=False, + udp_dest_auto=use_set_config_auto) + + new_config = client.get_config(hostname) + + return new_config + + +def configure_sensor_multi( + hostnames: List[str], + first_lidar_port: Optional[int] = None, + do_not_reinitialize: bool = False, + no_auto_udp_dest: bool = False) -> List[client.SensorConfig]: + """Configure multiple sensors by hostnames/ips""" + + first_lidar_port = first_lidar_port or 17502 + configs: List[client.SensorConfig] = [] + + for idx, hn in enumerate(hostnames): + lidar_port = first_lidar_port + idx * 2 + imu_port = lidar_port + 1 + configs.append( + configure_sensor(hn, + lidar_port=lidar_port, + imu_port=imu_port, + do_not_reinitialize=do_not_reinitialize, + no_auto_udp_dest=no_auto_udp_dest)) + print(f"Initializing connection to sensor {hn} on " + f"lidar port {configs[-1].udp_port_lidar} with udp dest " + f"'{configs[-1].udp_dest}'...") + return configs diff --git a/python/src/ouster/sdk/sensor_util.py b/python/src/ouster/sdk/sensor_util.py new file mode 100644 index 00000000..e96df871 --- /dev/null +++ b/python/src/ouster/sdk/sensor_util.py @@ -0,0 +1,3 @@ +# TODO: remove +raise RuntimeError("The methods in this file have been move to ouster.sensor.util." +" THIS FILE WILL BE REMOVED IN NEXT RELEASE") diff --git a/python/src/ouster/sdk/simple_viz.py b/python/src/ouster/sdk/simple_viz.py index 5592ebbd..d068a1d9 100644 --- a/python/src/ouster/sdk/simple_viz.py +++ b/python/src/ouster/sdk/simple_viz.py @@ -3,6 +3,8 @@ All rights reserved. """ +# TODO: remove + def main() -> None: print("We have moved simple-viz into our new command line utility. " diff --git a/python/src/ouster/sdk/util/__init__.py b/python/src/ouster/sdk/util/__init__.py new file mode 100644 index 00000000..6d24ce25 --- /dev/null +++ b/python/src/ouster/sdk/util/__init__.py @@ -0,0 +1,28 @@ +""" +Copyright (c) 2024, Ouster, Inc. 
+All rights reserved. + +Generic util module +""" +# flake8: noqa: F401 (unused imports) + +from .metadata import resolve_metadata +from .metadata import resolve_metadata_multi +from .metadata import firmware_version # TODO[UN]: this function should + # probably should be moved to + # its own file. + +from .parsing import default_scan_fields # type: ignore +from .parsing import scan_to_packets # type: ignore +from .parsing import resolve_field_types # type: ignore +from .parsing import PacketFormat # type: ignore +from .parsing import ColHeader # type: ignore +from .parsing import FusaDualFormat # type: ignore + +from .extrinsics import resolve_extrinsics # type: ignore +from .extrinsics import _parse_extrinsics_file # type: ignore +from .extrinsics import img_aspect_ratio # type: ignore + +from .progress_bar import progressbar + +from .forward_slicer import ForwardSlicer diff --git a/python/src/ouster/sdkx/util.py b/python/src/ouster/sdk/util/extrinsics.py similarity index 93% rename from python/src/ouster/sdkx/util.py rename to python/src/ouster/sdk/util/extrinsics.py index ff8686a4..ab0fb2c3 100644 --- a/python/src/ouster/sdkx/util.py +++ b/python/src/ouster/sdk/util/extrinsics.py @@ -5,7 +5,7 @@ import json from typing import Optional, Tuple, List -from ouster import client +from ouster.sdk import client import numpy as np import tarfile import re @@ -53,11 +53,13 @@ def quatToRotMat(q: np.ndarray) -> np.ndarray: """Converts Quaternion [w, x, y, z] to Rotation [3x3] matrix.""" (q0, q1, q2, q3) = q # yapf: disable + # autopep8: off return np.array([ [2 * (q0 * q0 + q1 * q1) - 1, 2 * (q1 * q2 - q0 * q3), 2 * (q1 * q3 + q0 * q2)], [2 * (q1 * q2 + q0 * q3), 2 * (q0 * q0 + q2 * q2) - 1, 2 * (q2 * q3 - q0 * q1)], [2 * (q1 * q3 - q0 * q2), 2 * (q2 * q3 + q0 * q1), 2 * (q0 * q0 + q3 * q3) - 1] ]) + # autopep8: on # yapf: enable @@ -88,7 +90,9 @@ def _parse_extrinsics_json(json_data: str, """Parsing extrinsics json and looking for sensor names transforms.""" try: extrinsics_data = json.loads(json_data) - except Exception: + except Exception as e: + # TODO[pb]: Use logging + print("ERROR: Can't parse extrinsics_parameters.json file: ", str(e)) return [] if "transforms" not in extrinsics_data: @@ -130,9 +134,10 @@ def resolve_extrinsics( `extrinsics.json` files when it will be fully defined. 
""" snames = sensor_names or [info.sn for info in infos] - if os.path.splitext(data_path)[1] == ".pcap": - ext_file = os.path.join(os.path.dirname(data_path), - "extrinsic_parameters.json") + if os.path.splitext(data_path)[1] == ".pcap" or os.path.isdir(data_path): + ext_file = os.path.join( + os.path.dirname(data_path) if not os.path.isdir(data_path) else + data_path, "extrinsic_parameters.json") if os.path.exists(ext_file): # Found perception extrinsics file return _parse_extrinsics_file(ext_file, diff --git a/python/src/ouster/sdk/util/forward_slicer.py b/python/src/ouster/sdk/util/forward_slicer.py new file mode 100644 index 00000000..7f2f4bfe --- /dev/null +++ b/python/src/ouster/sdk/util/forward_slicer.py @@ -0,0 +1,64 @@ +from typing import Iterator + + +class ForwardSlicer: + """ForwardSlicer provides slicing methods to slice up a container with step + to containers that only support forward slicing""" + + @staticmethod + def normalize(key: slice, L: int) -> slice: + + def _slice_step(step): + if step is None: + return 1 + if step == 0: + raise ValueError("slice step cannot be zero") + return step + + def _slice_clamp(value, length, default): + if value is None: + return default + if value < 0: + return max(0, length + value) + return min(value, length) + + step = _slice_step(key.step) + if step > 0: + start = _slice_clamp(key.start, L, 0) + stop = _slice_clamp(key.stop, L, L) + else: + start = _slice_clamp(key.stop, L, -1) + 1 + stop = min(L, _slice_clamp(key.start, L, L) + 1) + + return slice(start, stop, step) + + @staticmethod + def slice(data_iter: Iterator, key: slice): + """ + Performs forward slicing on a dataset with step + + Parameters: + - key: must be a normalized slice key with relation to the used data_iter. + a normalized slice key is one where key.start < key.stop and no non-values + """ + + def _stepper(data_iter, start, stop, step): + out = [] + if step < 0: + # align with the end + step = -step + aligned_start = (stop - 1) - (stop - start) // step * step + if aligned_start < start: + aligned_start += step + for _ in range(aligned_start - start): + next(data_iter) + while True: + try: + out.append(next(data_iter)) + for _ in range(step - 1): + next(data_iter) + except StopIteration: + break + return out + + return _stepper(data_iter, key.start, key.stop, key.step) diff --git a/python/src/ouster/sdk/util.py b/python/src/ouster/sdk/util/metadata.py similarity index 67% rename from python/src/ouster/sdk/util.py rename to python/src/ouster/sdk/util/metadata.py index 206ff0c5..5fa6f587 100644 --- a/python/src/ouster/sdk/util.py +++ b/python/src/ouster/sdk/util/metadata.py @@ -1,13 +1,18 @@ """Miscellaneous utilites.""" -import os +import os.path +from pathlib import Path from typing import Optional, List, Any from packaging import version import requests import re -def resolve_metadata_multi_with_prefix_guess(data_path: str) -> List[str]: +data_must_be_a_file_err = "The source parameter must be a path to a file." +meta_must_be_a_file_err = "The metadata parameter must be a path to a file." 
+ + +def _resolve_metadata_multi_with_prefix_guess(data_path: str) -> List[str]: """Look for best-matching metadata files from all json files in the same dir Args: @@ -16,11 +21,17 @@ def resolve_metadata_multi_with_prefix_guess(data_path: str) -> List[str]: Returns: list of metadata json paths guessed with the most common prefix match """ + if not os.path.isfile(data_path): + raise ValueError(data_must_be_a_file_err) + dirname, pcap_ = os.path.split(data_path) if not dirname: dirname = os.getcwd() - # find all .json in same dir - options = list(filter(lambda f: f.endswith(".json"), os.listdir(dirname))) + # find all .json files in same dir + options = list(filter( + lambda f: (Path(dirname) / f).is_file() and + f.lower().endswith(".json"), os.listdir(dirname) + )) # for each json name, find how many characters are in common option_scores = map(lambda f: len(os.path.commonprefix([f, pcap_])), options) @@ -28,13 +39,15 @@ def resolve_metadata_multi_with_prefix_guess(data_path: str) -> List[str]: return [] # select all jsons with the longest common prefix of equal size sorted_options = sorted(zip(options, option_scores), - key=lambda i: i[1], - reverse=True) + key=lambda i: i[1], + reverse=True) best_score = sorted_options[0][1] if not best_score: - # return a single json if there is no files with commonprefix - # because it's probably not a multi-sensor recording - return [os.path.join(dirname, sorted_options[0][0])] + # TWS 20240329: previously, this method would return + # any old JSON file even if there was no common prefix. + # In my experience, it's almost always an incorrect guess. + # Now it requires at least a single character to be common. + return [] else: return [ os.path.join(dirname, b_path) for b_path, _ in filter( @@ -61,14 +74,13 @@ def resolve_metadata(data_path: str, metadata json paths guessed with the most common prefix match or passed through from `meta_path` parameter """ - if meta_path is None: - meta_paths = resolve_metadata_multi_with_prefix_guess(data_path) - meta_path = meta_paths[0] if meta_paths else "" - if os.path.exists(meta_path): + if meta_path is not None: + if os.path.isfile(meta_path): return meta_path - elif os.path.exists(meta_path): - return meta_path - return None + raise ValueError(meta_must_be_a_file_err) + + meta_paths = _resolve_metadata_multi_with_prefix_guess(data_path) + return meta_paths[0] if meta_paths else None def resolve_metadata_multi(data_path: str) -> List[str]: @@ -80,7 +92,7 @@ def resolve_metadata_multi(data_path: str) -> List[str]: Returns: list of metadata json paths guessed with the most common prefix match """ - return resolve_metadata_multi_with_prefix_guess(data_path) + return _resolve_metadata_multi_with_prefix_guess(data_path) def firmware_version(hostname: str) -> Any: @@ -94,7 +106,6 @@ def firmware_version(hostname: str) -> Any: return version.Version(".".join( [match.group(1), match.group(2), match.group(3)])) - else: - raise RuntimeError( - f"Could not get sensor firmware version from {response.text}") - return None + + raise RuntimeError( + f"Could not get sensor firmware version from {response.text}") diff --git a/python/src/ouster/sdkx/parsing.py b/python/src/ouster/sdk/util/parsing.py similarity index 87% rename from python/src/ouster/sdkx/parsing.py rename to python/src/ouster/sdk/util/parsing.py index 30c3a051..7d365404 100644 --- a/python/src/ouster/sdkx/parsing.py +++ b/python/src/ouster/sdk/util/parsing.py @@ -10,16 +10,16 @@ import numpy as np -import ouster.client as client -from ouster.client import 
(ChanField, ColHeader, FieldDType, SensorInfo, +import ouster.sdk.client as client +from ouster.sdk.client import (ChanField, ColHeader, FieldDType, SensorInfo, UDPProfileLidar, LidarPacket) -from ouster.client._client import PacketWriter, get_field_types +from ouster.sdk.client._client import PacketWriter, get_field_types def default_scan_fields( profile: UDPProfileLidar, flags: bool = False, - raw_headers: bool = False) -> Optional[Dict[ChanField, FieldDType]]: + raw_headers: bool = False) -> Optional[client.FieldTypes]: """Get the default fields populated on scans for a profile. Convenient helper function if you want to tweak which fields are parsed @@ -56,6 +56,94 @@ def default_scan_fields( return fields.copy() +def resolve_field_types( + metadata: Union[client.SensorInfo, List[client.SensorInfo]], + flags: bool = False, + raw_headers: bool = False, + raw_fields: bool = False +) -> Union[client.FieldTypes, List[client.FieldTypes]]: + """Resolving optimal field types for OSF LidarScanStream encoder + + Shrinks the sizes of the LEGACY UDPLidarProfile fields and extends with + FLAGS/FLAGS2 if `flags=True`. + + Args: + metadata: single SensorInfo or a list of SensorInfo used resolve + UDPLidarProfile + flags: True if augment the resulting fields with FLAGS/FLAGS2 + raw_headers: True if RAW_HEADERS field should be included (i.e. all + lidar packet headers and footers will be added during + batching) + raw_fields: True if RAW32_WORDx fields should be included + + Returns: + field types of a typical LidarScan with a requested optional fields. + """ + + single_result = False + if not isinstance(metadata, list): + metadata = [metadata] + single_result = True + + field_types = [] + + for m in metadata: + ftypes = client.get_field_types(m) + profile = m.format.udp_profile_lidar + + # HACK: Overwrite fields to reduced datatypes for LEGACY (saves ~15% of + # space in a file) + if profile == client.UDPProfileLidar.PROFILE_LIDAR_LEGACY: + ftypes.update( + dict({ + client.ChanField.RANGE: np.uint32, + client.ChanField.SIGNAL: np.uint16, + client.ChanField.REFLECTIVITY: np.uint16, + client.ChanField.NEAR_IR: np.uint16 + })) + + if flags: + ftypes.update({client.ChanField.FLAGS: np.uint8}) + if client.ChanField.RANGE2 in ftypes: + ftypes.update({client.ChanField.FLAGS2: np.uint8}) + + if raw_fields: + ftypes.update({client.ChanField.RAW32_WORD1: np.uint32}) + if profile != client.UDPProfileLidar.PROFILE_LIDAR_RNG15_RFL8_NIR8: + # not Low Bandwidth + ftypes.update( + {client.ChanField.RAW32_WORD2: np.uint32}) + ftypes.update( + {client.ChanField.RAW32_WORD3: np.uint32}) + if client.ChanField.RANGE2 in ftypes: + ftypes.update( + {client.ChanField.RAW32_WORD4: np.uint32}) + if profile == client.UDPProfileLidar.PROFILE_LIDAR_FIVE_WORD_PIXEL: + ftypes.update( + dict({ + client.ChanField.RAW32_WORD4: np.uint32, + client.ChanField.RAW32_WORD5: np.uint32 + })) + + if raw_headers: + # getting the optimal field type for RAW_HEADERS + pf = client._client.PacketFormat.from_info(m) + h = pf.pixels_per_column + raw_headers_space = (pf.packet_header_size + + pf.packet_footer_size + pf.col_header_size + + pf.col_footer_size) + dtype = [ + np.uint8, + np.uint16, + np.uint32 + ][int(raw_headers_space / h)] + ftypes.update({client.ChanField.RAW_HEADERS: dtype}) # type: ignore + + field_types.append(ftypes) + + return field_types[0] if single_result else field_types + + @dataclass class FieldDescr: offset: int @@ -572,7 +660,7 @@ def scan_to_packets(ls: client.LidarScan, A set of lidar packets that will produce the same 
LidarScan if passed through the ScanBatcher again (less fields data) """ - return client._client.scan_to_packets(ls, PacketWriter.from_info(info)) + return client._client.scan_to_packets(ls, PacketWriter.from_info(info), info.init_id, int(info.sn)) def terminator_packet(info: client.SensorInfo, @@ -667,6 +755,6 @@ def cut_raw32_words(ls: client.LidarScan) -> client.LidarScan: client.ChanField.RAW32_WORD9 ] - import ouster.osf as osf + import ouster.sdk.osf as osf new_fields = {c: ls.field(c).dtype for c in ls.fields if c not in cut_chans} return osf.slice_and_cast(ls, new_fields) diff --git a/python/src/ouster/sdk/pose_util.py b/python/src/ouster/sdk/util/pose_util.py similarity index 96% rename from python/src/ouster/sdk/pose_util.py rename to python/src/ouster/sdk/util/pose_util.py index fde2a092..ad125de8 100644 --- a/python/src/ouster/sdk/pose_util.py +++ b/python/src/ouster/sdk/util/pose_util.py @@ -5,7 +5,7 @@ import bisect -from ouster import client +from ouster.sdk import client import logging @@ -426,6 +426,7 @@ class TrajectoryEvaluator(Poser): TODO: Optionally, we may want to implement these calculations in C++ and use bindings to make it faster. """ + def __init__(self, poses: TrajPoses, *, time_bounds: Optional[float] = 0): """ Args: @@ -642,37 +643,31 @@ def dewarp(xyz: np.ndarray, *, scan_pose: Optional[PoseH] = None, returned by call `client.XYZLut` so it can further used with PointViz and other functions that expect this specific layout. """ - if xyz.ndim != 3 or xyz.shape[2] != 3: - raise ValueError("Expects xyz to be (H, W, 3) in dewarp") + raise ValueError("Expect xyz to be (H, W, 3) shape") - h, w = xyz.shape[0], xyz.shape[1] + if scan_pose is not None and scan_pose.shape != (4, 4): + raise ValueError("Expect scan_pose to be (4, 4) shape") if column_poses is not None: if not (column_poses.shape[0] == xyz.shape[1] and column_poses.shape[1] == 4 and column_poses.shape[2] == 4): - raise ValueError("Expects column_poses to be (W, 4, 4) in dewarp") - if scan_pose is not None: - xyz_poses = np.einsum('ij,ljk->lik', scan_pose, column_poses) - else: - xyz_poses = column_poses - - # Angus's version: This one is correct for sure - xyz_res = np.transpose( - np.matmul(xyz_poses[:, :3, :3], np.transpose(xyz, axes=(1, 2, 0))), - axes=(2, 0, 1)) + xyz_poses[np.newaxis, :, :3, -1] - xyz_res = np.asfortranarray(xyz_res.reshape((-1, 3))) - return xyz_res.reshape((h, w, -1)) - - if scan_pose is None: - return xyz - - # Angus's version - xyz_res = np.transpose(np.matmul(scan_pose[np.newaxis, :3, :3], - np.transpose(xyz, axes=(1, 2, 0))), - axes=(2, 0, 1)) + scan_pose[np.newaxis, :3, -1] - xyz_res = np.asfortranarray(xyz_res.reshape((-1, 3))) - return xyz_res.reshape((h, w, -1)) + raise ValueError("Expect column_poses to be (W, 4, 4) shape") + + # Apply transformations + if column_poses is not None: + xyz_poses = np.matmul(scan_pose, column_poses) if scan_pose is not None else column_poses + + xyz_transformed = np.transpose(np.matmul(xyz_poses[:, :3, :3], np.transpose(xyz, axes=(1, 2, 0))), + axes=(2, 0, 1)) + xyz_poses[np.newaxis, :, :3, -1] + elif scan_pose is not None: + xyz_transformed = np.transpose(np.matmul(scan_pose[np.newaxis, :3, :3], + np.transpose(xyz, axes=(1, 2, 0))), + axes=(2, 0, 1)) + scan_pose[np.newaxis, :3, -1] + else: + xyz_transformed = xyz + + return xyz_transformed ScansIterable = Union[Iterable[client.LidarScan], diff --git a/python/src/ouster/sdk/util/progress_bar.py b/python/src/ouster/sdk/util/progress_bar.py new file mode 100644 index 00000000..e66340c2 --- 
/dev/null +++ b/python/src/ouster/sdk/util/progress_bar.py @@ -0,0 +1,18 @@ +def progressbar(progress, total, prefix="", suffix=""): + """ + Displays progress in the console as a percentage. + + Args: + progress: The current progress (number of items completed). + total: The total number of items. + prefix: A prefix string to display before the progress bar (optional). + suffix: A suffix string to display after the progress bar (optional). + """ + if total == 0: + raise ValueError( + "Progress cannot be displayed for a total of 0 items.") + progress = total if progress > total else progress + percent = round(100 * progress / total, 1) + filled_length = int(round(percent * 20 / 100)) + bar = f'[{filled_length * "#"}{(20 - filled_length) * "-"}]' + print(f'{prefix} {bar} {percent}% {suffix}', end="\r") diff --git a/python/src/ouster/sdk/util/py.typed b/python/src/ouster/sdk/util/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/python/src/ouster/sdk/viz/__init__.py b/python/src/ouster/sdk/viz/__init__.py new file mode 100644 index 00000000..2d2d5d7a --- /dev/null +++ b/python/src/ouster/sdk/viz/__init__.py @@ -0,0 +1,36 @@ +""" +Copyright (c) 2023, Ouster, Inc. +All rights reserved. + +Ouster Visualizer (aka PointViz and tools) +""" +# flake8: noqa (unused imports) + +from ._viz import PointViz +from ._viz import Cloud +from ._viz import Image +from ._viz import Cuboid +from ._viz import Label +from ._viz import WindowCtx +from ._viz import Camera +from ._viz import TargetDisplay +from ._viz import add_default_controls +from ._viz import calref_palette +from ._viz import spezia_palette +from ._viz import grey_palette +from ._viz import viridis_palette +from ._viz import magma_palette + +from .core import push_point_viz_handler +from .core import LidarScanViz +from .core import SimpleViz +from .core import scans_accum_for_cli +from .view_mode import ImageMode +from .view_mode import CloudMode +from .view_mode import ImageCloudMode +from .core import CloudPaletteItem +from .core import VizExtraMode + +from .util import AxisWithLabel + +from .scans_accum import ScansAccumulator \ No newline at end of file diff --git a/python/src/ouster/viz/_viz.pyi b/python/src/ouster/sdk/viz/_viz.pyi similarity index 95% rename from python/src/ouster/viz/_viz.pyi rename to python/src/ouster/sdk/viz/_viz.pyi index dc9ac3da..f3efa2d9 100644 --- a/python/src/ouster/viz/_viz.pyi +++ b/python/src/ouster/sdk/viz/_viz.pyi @@ -9,13 +9,17 @@ from typing import Callable, overload, Tuple, List import numpy as np -from ..client import SensorInfo +from ouster.sdk.client import SensorInfo calref_palette: np.ndarray spezia_palette: np.ndarray +spezia_cal_ref_palette: np.ndarray grey_palette: np.ndarray +grey_cal_ref_palette: np.ndarray viridis_palette: np.ndarray +viridis_cal_ref_palette: np.ndarray magma_palette: np.ndarray +magma_cal_ref_palette: np.ndarray class WindowCtx: @@ -271,6 +275,14 @@ class PointViz: def running(self, state: bool) -> None: ... + @overload + def update_on_input(self) -> bool: + ... + + @overload + def update_on_input(self, state: bool) -> None: + ... + def update(self) -> bool: ... 
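A short sketch of driving the progressbar helper from progress_bar.py above the same way the pcap indexing path does: call it once per processed item and finish with a newline, since the trailing carriage return keeps overwriting a single console line (the total below is illustrative):

    from ouster.sdk.util import progressbar  # package layout added in this diff

    scans_count = 200  # illustrative; normally the number of scans in the source
    for scan_idx in range(scans_count):
        # ... index or process one collated scan here ...
        progressbar(scan_idx, scans_count, "", "indexed")
    print("\nfinished building index")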
diff --git a/python/src/ouster/viz/core.py b/python/src/ouster/sdk/viz/core.py similarity index 94% rename from python/src/ouster/viz/core.py rename to python/src/ouster/sdk/viz/core.py index c98b1411..85432b30 100644 --- a/python/src/ouster/viz/core.py +++ b/python/src/ouster/sdk/viz/core.py @@ -12,8 +12,10 @@ from functools import partial from enum import Enum import os +import platform import threading import time +import copy from datetime import datetime from typing import (Callable, ClassVar, Deque, Dict, Generic, Iterable, List, Optional, Tuple, TypeVar, Union, Any) @@ -23,11 +25,13 @@ import numpy as np from PIL import Image as PILImage -from ouster import client -from ouster.client import ChanField, ShotLimitingStatus, ThermalShutdownStatus +from ouster.sdk import client +from ouster.sdk.client import ChanField, ShotLimitingStatus, ThermalShutdownStatus from ._viz import (PointViz, Cloud, Image, Cuboid, Label, WindowCtx, Camera, TargetDisplay, add_default_controls, calref_palette, - spezia_palette, grey_palette, viridis_palette, magma_palette) + spezia_palette, spezia_cal_ref_palette, grey_palette, + grey_cal_ref_palette, viridis_palette, viridis_cal_ref_palette, + magma_palette, magma_cal_ref_palette) from .util import push_point_viz_handler, push_point_viz_fb_handler from . import util as vizu @@ -36,8 +40,8 @@ from .scans_accum import ScansAccumulator -from ouster.sdkx.util import img_aspect_ratio # type:ignore -import platform +from ouster.sdk.util import img_aspect_ratio # type:ignore + logger = logging.getLogger("viz-logger") client_log_location = None @@ -145,9 +149,13 @@ def __init__( self._cloud_enabled = [True, True] self._cloud_pt_size = 2.0 - self._cloud_refl_mode_prev = False - self._cloud_refl_mode = False + self._cloud_palette_prev: Optional[CloudPaletteItem] = None + + # make a special version of the calref palette for images + self._image_calref_palette = copy.deepcopy(calref_palette) + self._image_calref_palette[0] = [0.1, 0.1, 0.1] + # Note these 2 palette arrays must always be the same length self._cloud_palettes: List[CloudPaletteItem] self._cloud_palettes = [ CloudPaletteItem("Ouster Colors", spezia_palette), @@ -157,13 +165,24 @@ def __init__( CloudPaletteItem("Cal. Ref", calref_palette), ] + self._refl_cloud_palettes: List[CloudPaletteItem] + self._refl_cloud_palettes = [ + CloudPaletteItem("Cal. Ref. Ouster Colors", spezia_cal_ref_palette), + CloudPaletteItem("Cal. Ref. Greyscale", grey_cal_ref_palette), + CloudPaletteItem("Cal. Ref. Viridis", viridis_cal_ref_palette), + CloudPaletteItem("Cal. Ref. Magma", magma_cal_ref_palette), + CloudPaletteItem("Cal. 
Ref", calref_palette), + ] + # Add extra color palettes, usually inserted through plugins self._cloud_palettes.extend(_viz_extra_palettes) + self._refl_cloud_palettes.extend(_viz_extra_palettes) self._cloud_palettes.extend(_ext_palettes or []) + self._refl_cloud_palettes.extend(_ext_palettes or []) self._cloud_palette_ind = 0 - self._cloud_palette = self._cloud_palettes[self._cloud_palette_ind] + self._cloud_palette = self._refl_cloud_palettes[self._cloud_palette_ind] self._cloud_palette_name = self._cloud_palette.name # image display state @@ -283,7 +302,7 @@ def handle_keys(self: LidarScanViz, ctx: WindowCtx, key: int, mods: int) -> bool: if (key, mods) in key_bindings: key_bindings[key, mods](self) - self.draw() + self.draw(update=self._viz.update_on_input()) return True key_definitions: Dict[str, str] = { @@ -294,7 +313,7 @@ def handle_keys(self: LidarScanViz, ctx: WindowCtx, key: int, "e / E": "Increase/decrease size of displayed 2D images", "p / P": "Increase/decrease point size", "R": "Reset camera orientation", - "ctr+r": "Camera bird-eye view", + "CTRL+r": "Camera bird-eye view", "0": "Toggle orthographic camera", "1": "Toggle point cloud 1 visibility", "2": "Toggle point cloud 2 visibility", @@ -308,6 +327,7 @@ def handle_keys(self: LidarScanViz, ctx: WindowCtx, key: int, '?': "Print keys to standard out", "= / -": "Dolly in and out", "' / \"": "Increase/decrease spacing in range markers", + "CTRL+'": "Cycle through thickness of range markers or hide", 'SHIFT': "Camera Translation with mouse drag", 'ESC': "Exit the application", } @@ -332,7 +352,6 @@ def cycle_cloud_mode(self, *, direction: int = 1) -> None: """Change the coloring mode of the 3D point cloud.""" with self._lock: self._cloud_mode_ind = (self._cloud_mode_ind + direction) - self._cloud_refl_mode = False def cycle_cloud_palette(self, *, direction: int = 1) -> None: """Change the color palette of the 3D point cloud.""" @@ -341,7 +360,6 @@ def cycle_cloud_palette(self, *, direction: int = 1) -> None: self._cloud_palette_ind = (self._cloud_palette_ind + npalettes + direction) % npalettes self._cloud_palette = self._cloud_palettes[self._cloud_palette_ind] - self._cloud_refl_mode = False def toggle_cloud(self, i: int) -> None: """Toggle whether the i'th return is displayed.""" @@ -388,8 +406,9 @@ def update_ring_size(self, amount: int) -> None: def cicle_ring_line_width(self) -> None: """Change rings line width.""" with self._lock: - self._ring_line_width = max(1, (self._ring_line_width + 1) % 10) + self._ring_line_width = max(0, (self._ring_line_width + 1) % 10) self._viz.target_display.set_ring_line_width(self._ring_line_width) + self._viz.target_display.enable_rings(self._ring_line_width != 0) def toggle_osd(self, state: Optional[bool] = None) -> None: """Show or hide the on-screen display.""" @@ -514,18 +533,16 @@ def _draw(self) -> None: cloud_mode = cloud_modes[self._cloud_mode_ind] refl_mode = is_norm_reflectivity_mode(cloud_mode) + + current_palette = None if refl_mode: - if not self._cloud_refl_mode_prev: - self._cloud_palette = CloudPaletteItem("Cal. 
Ref", calref_palette) - self._cloud_refl_mode = True - elif not self._cloud_refl_mode: - self._cloud_palette = self._cloud_palettes[ - self._cloud_palette_ind] + current_palette = self._refl_cloud_palettes[self._cloud_palette_ind] else: - if self._cloud_refl_mode_prev: - self._cloud_palette = self._cloud_palettes[ - self._cloud_palette_ind] - self._cloud_refl_mode_prev = refl_mode + current_palette = self._cloud_palettes[self._cloud_palette_ind] + + if self._cloud_palette_prev is None or self._cloud_palette_prev.name != current_palette.name: # type: ignore + self._cloud_palette = current_palette + self._cloud_palette_prev = current_palette for i, range_field in ((0, ChanField.RANGE), (1, ChanField.RANGE2)): if range_field in scan.fields: @@ -560,7 +577,7 @@ def _draw(self) -> None: refl_mode = is_norm_reflectivity_mode(img_mode) if refl_mode and not self._img_refl_mode[i]: - self._images[i].set_palette(calref_palette) + self._images[i].set_palette(self._image_calref_palette) if not refl_mode and self._img_refl_mode[i]: self._images[i].clear_palette() self._img_refl_mode[i] = refl_mode @@ -641,7 +658,6 @@ def _draw_update_flags_mode(self) -> None: mask_opacity = (self._scan.field(flag_field) & 0x1) * 1.0 self._cloud_masks[i][:, :, 3] = mask_opacity self._clouds[i].set_mask(self._cloud_masks[i]) - # set range to zero where first flags bit is set elif self._flags_mode == LidarScanViz.FlagsMode.HIDE_BLOOM: for i, flag_field, range_field in ((0, ChanField.FLAGS, @@ -807,6 +823,7 @@ def __init__(self, # pausing and stepping self._cv = threading.Condition() self._paused = False + self._viz.update_on_input(self._paused) self._step = 0 self._proc_exit = False @@ -914,6 +931,7 @@ def toggle_pause(self) -> None: """Pause or unpause the visualization.""" with self._cv: self._paused = not self._paused + self._viz.update_on_input(self._paused) self._update_playback_osd() if not self._paused: self._cv.notify() @@ -922,6 +940,7 @@ def seek_relative(self, n_frames: int) -> None: """Seek forward of backwards in the stream.""" with self._cv: self._paused = True + self._viz.update_on_input(self._paused) self._step = n_frames self._update_playback_osd() self._cv.notify() @@ -1006,6 +1025,7 @@ def _process(self, seekable: _Seekable[client.LidarScan]) -> None: # process new data scan_idx = seekable.next_ind scan = next(seekable) + # TODO[pb]: Now scan_idx keeps increasing if looped source # is presented, thus there is a need to keep track the lapsed # scan_idx and pass it always as a valid scan number starting @@ -1020,6 +1040,7 @@ def _process(self, seekable: _Seekable[client.LidarScan]) -> None: if self._pause_at == scan_idx: self._paused = True + self._viz.update_on_input(self._paused) self._update_playback_osd() @@ -1036,9 +1057,16 @@ def _process(self, seekable: _Seekable[client.LidarScan]) -> None: # show new data self._viz.update() except StopIteration: - if self._on_eof == 'exit': + if not self._paused and not self._on_eof == "stop": break + # Pause after we get a StopIteration in eof "stop" + if self._on_eof == "stop": + self._paused = True + self._viz.update_on_input(self._paused) + self._update_playback_osd() + self._viz.update() + finally: # signal rendering (main) thread to exit, with a delay # because the viz in main thread may not have been started diff --git a/python/src/ouster/sdk/viz/multi_viz.py b/python/src/ouster/sdk/viz/multi_viz.py new file mode 100644 index 00000000..7d0c8ca7 --- /dev/null +++ b/python/src/ouster/sdk/viz/multi_viz.py @@ -0,0 +1,653 @@ +# type: ignore +from typing import 
Optional, Dict, Tuple, List, Callable, Union +from enum import Enum + +import os +import numpy as np +import threading +from functools import partial + +from ouster.sdk import client +from ouster.sdk.client import ChanField +import ouster.sdk.viz as viz +from ouster.sdk.viz import AxisWithLabel +from ouster.sdk.util import img_aspect_ratio +from ouster.sdk.viz import (PointViz, WindowCtx, push_point_viz_handler, + calref_palette, spezia_palette) + + +class MultiLidarScanViz: + """Multi LidarScan clouds visualizer""" + + class CloudMode(Enum): + REFLECTIVITY = 0 + RANGE = 1 + SIGNAL = 2 + NEAR_IR = 3 + RGB = 4 + + _mode_to_channels: Dict[CloudMode, List[ChanField]] = { + CloudMode.RANGE: [ChanField.RANGE], + CloudMode.REFLECTIVITY: [ChanField.REFLECTIVITY], + CloudMode.SIGNAL: [ChanField.SIGNAL], + CloudMode.NEAR_IR: [ChanField.NEAR_IR], + CloudMode.RGB: + [ChanField.CUSTOM0, ChanField.CUSTOM1, ChanField.CUSTOM2] + } + + class ImagesLayout(Enum): + HORIZONTAL = 0 + VERTICAL = 1 + + def __init__(self, + metas: List[client.SensorInfo], + *, + source_name: str = "", + point_viz: Optional[PointViz] = None) -> None: + + # used to synchronize key handlers and _draw() + self._lock = threading.Lock() + + self._source_name = source_name + self._metas = metas + assert len(self._metas) > 0, "ERROR: Expect at least one sensor" + + self._sensor_enabled = [True for _ in self._metas] + + self._sensor_img_aspects = [img_aspect_ratio(m) for m in self._metas] + + # initialize Auto Exposures + self._ae_enabled = True + self._ae_signal = [ + client._utils.AutoExposure() for _ in self._metas + ] + self._ae_nearir = [ + client._utils.AutoExposure() for _ in self._metas + ] + self._buc_nearir = [ + client._utils.BeamUniformityCorrector() for _ in self._metas + ] + self._ae_range = client._utils.AutoExposure() + + self._viz = point_viz or PointViz("Ouster Mutli Sensor Viz") + + # initial point size in scan clouds + self._cloud_pt_size = 1.0 + + self._clouds = [] + self._images = [] + self._image_labels = [] + for idx, m in enumerate(self._metas): + # initialize clouds + self._clouds.append(viz.Cloud(m)) + self._clouds[-1].set_point_size(self._cloud_pt_size) + self._viz.add(self._clouds[-1]) + + # initialize images and labels + self._images.append(viz.Image()) + self._viz.add(self._images[-1]) + self._image_labels.append(viz.Label(m.hostname, 0.0, 0.0)) + self._viz.add(self._image_labels[-1]) + + self._cloud_mode = MultiLidarScanViz.CloudMode.REFLECTIVITY + self._cloud_palette = calref_palette + + self._images_layout = MultiLidarScanViz.ImagesLayout.HORIZONTAL + + self._ring_size = 1 + self._ring_line_width = 1 + + # set initial image sizes + self._img_size_fraction = 1 + + # initialize rings + self._viz.target_display.set_ring_size(self._ring_size) + self._viz.target_display.enable_rings(True) + + # initialize osd + self._osd_enabled = True + self._osd = viz.Label("", 0, 1) + self._viz.add(self._osd) + + # initialize scan axis helpers + self._scan_axis_enabled = True + self._scan_axis = [] + # sensors axis + for idx, m in enumerate(self._metas): + self._scan_axis.append( + AxisWithLabel(self._viz, + pose=m.extrinsic, + label=str(idx + 1), + thickness=3)) + # system center axis + self._scan_axis_origin = AxisWithLabel(self._viz, + label="O", + thickness=5, + label_scale=0.4) + + self._scan_poses_enabled = True + + self._column_poses_identity = [] + for m in self._metas: + self._column_poses_identity.append( + np.array([np.eye(4) for _ in range(m.format.columns_per_frame)], + order='F', + dtype=np.float32)) + + 
self._scan_num = -1 + + # extension point for the OSD text, inserts on top of current OSD + self._osd_text_extra: Callable[[], str] = lambda: "" + + # key bindings. will be called from rendering thread, must be synchronized + key_bindings: Dict[Tuple[int, int], Callable[[MultiLidarScanViz], None]] = { + (ord('H'), 0): MultiLidarScanViz.toggle_scan_axis, + (ord('P'), 0): partial(MultiLidarScanViz.update_point_size, amount=1), + (ord('P'), 1): partial(MultiLidarScanViz.update_point_size, amount=-1), + (ord('M'), 0): MultiLidarScanViz.update_cloud_mode, + (ord('L'), 0): MultiLidarScanViz.update_images_layout, + (ord('E'), 0): partial(MultiLidarScanViz.update_image_size, amount=1), + (ord('E'), 1): partial(MultiLidarScanViz.update_image_size, amount=-1), + (ord('A'), 1): MultiLidarScanViz.toggle_auto_exposure, + (ord('1'), 2): partial(MultiLidarScanViz.toggle_sensor, i=0), + (ord('2'), 2): partial(MultiLidarScanViz.toggle_sensor, i=1), + (ord('3'), 2): partial(MultiLidarScanViz.toggle_sensor, i=2), + (ord('4'), 2): partial(MultiLidarScanViz.toggle_sensor, i=3), + (ord('5'), 2): partial(MultiLidarScanViz.toggle_sensor, i=4), + (ord('6'), 2): partial(MultiLidarScanViz.toggle_sensor, i=5), + (ord('7'), 2): partial(MultiLidarScanViz.toggle_sensor, i=6), + (ord('8'), 2): partial(MultiLidarScanViz.toggle_sensor, i=7), + (ord('9'), 2): partial(MultiLidarScanViz.toggle_sensor, i=8), + (ord("'"), 0): partial(MultiLidarScanViz.update_ring_size, amount=1), + (ord("'"), 1): partial(MultiLidarScanViz.update_ring_size, amount=-1), + (ord("'"), 2): MultiLidarScanViz.cicle_ring_line_width, + (ord("O"), 0): MultiLidarScanViz.toggle_osd, + (ord("T"), 0): MultiLidarScanViz.toggle_scan_poses, + } + + def handle_keys(self: MultiLidarScanViz, ctx: WindowCtx, key: int, + mods: int) -> bool: + if (key, mods) in key_bindings: + draw = key_bindings[key, mods](self) + if draw: + self.draw() + else: + self._viz.update() + return True + + push_point_viz_handler(self._viz, self, handle_keys) + viz.add_default_controls(self._viz) + + self._image_size_initialized = False + + def update_image_size(self, amount: int) -> None: + """Change the size of the 2D image and position image labels.""" + with self._lock: + size_fraction_max = 20 + self._img_size_fraction = (self._img_size_fraction + amount + + (size_fraction_max + 1)) % ( + size_fraction_max + 1) + + # inverted aspects exclusive prefix sum calculations + # used for horizontal 2d images layout + _enabled_sensor_aspects = [ + (i, a) for i, a in enumerate(self._sensor_img_aspects) + if self._sensor_enabled[i] + ] + + _enabled_sensor_aspects_sum = [0] * len(_enabled_sensor_aspects) + _enabled_sensor_aspects_total = 0 + + for idx_num, (idx, aspect) in enumerate(_enabled_sensor_aspects): + if idx_num == 0: + _enabled_sensor_aspects_sum[idx_num] = 0 + else: + _enabled_sensor_aspects_sum[ + idx_num] = _enabled_sensor_aspects_total + _enabled_sensor_aspects_total += 1 / aspect + + # total horizontal width in img coordinates + # NOTE: It's updating on window resize, but there are is no such + # handler, so it's expected to have labels position off till the + # next refresh on "E" or "O" keys + hwidth = 2.0 * self._viz.viewport_width / self._viz.viewport_height + + # total vertical span is 2.0: from [-1.0, 1.0] + vfrac = 2.0 * self._img_size_fraction / size_fraction_max + + _enabled_images = [ + (i, img, img_label) + for i, (img, img_label) in enumerate(zip(self._images, self._image_labels)) + if self._sensor_enabled[i] + ] + + # update image labels using enabled and osd_enabled 
states + for idx, img_label in enumerate(self._image_labels): + if (self._sensor_enabled[idx] and + self._img_size_fraction != 0 and self._osd_enabled): + sensor_str = self._metas[idx].hostname.replace("_", "..") + img_label.set_text(f"{idx + 1}: {sensor_str}") + else: + img_label.set_text("") + + # placement of enabled images and labels according to image layout + for idx_num, (idx, img, img_label) in enumerate(_enabled_images): + hfrac = vfrac / self._sensor_img_aspects[idx] + if self._images_layout == MultiLidarScanViz.ImagesLayout.HORIZONTAL: + # HORIZONTAL layout + x1_pos = (_enabled_sensor_aspects_sum[idx_num] - + _enabled_sensor_aspects_total / 2) * vfrac + + x2_pos = x1_pos + hfrac + img.set_position(x1_pos, x2_pos, 1 - vfrac, 1) + img.set_hshift(0) + x1_pos_label = x1_pos + # converting to label coordinates + x1_pos_label = (x1_pos_label + hwidth / 2) / hwidth + img_label.set_position(x1_pos_label, 0, align_top=True) + elif self._images_layout == MultiLidarScanViz.ImagesLayout.VERTICAL: + # VERTICAL layout + y1_pos = (idx_num + 1) * vfrac + y2_pos = idx_num * vfrac + # left alignment of the images + x1_pos = 0 + x2_pos = hfrac + img.set_position(x1_pos, x2_pos, 1 - y1_pos, + 1 - y2_pos) + img.set_hshift(-1) + x1_pos_label = - hwidth / 2 + # converting to label coordinates + x1_pos_label = (x1_pos_label + hwidth / 2) / hwidth # i.e. 0 + img_label.set_position(x1_pos_label, + 1 - (2 - y2_pos) / 2, + align_top=True) + + self._viz.camera.set_proj_offset(0, vfrac / 2) + return False + + @property + def metadata(self) -> List[client.SensorInfo]: + """Metadatas for the displayed sensors.""" + return self._metas + + @property + def scan(self) -> Tuple[Optional[client.LidarScan]]: + """The currently displayed scans.""" + return self._scan + + @property + def scan_num(self) -> int: + """The currently displayed scan number""" + return self._scan_num + + def update(self, + scan: Union[client.LidarScan, + Tuple[Optional[client.LidarScan]]], + scan_num: Optional[int] = None) -> None: + self._scan = [scan] if isinstance(scan, client.LidarScan) else scan + if scan_num is not None: + self._scan_num = scan_num + else: + self._scan_num += 1 + + def draw(self, update: bool = True) -> bool: + """Process and draw the latest state to the screen.""" + with self._lock: + self._draw() + + if not self._image_size_initialized: + self.update_image_size(0) + self._image_size_initialized = True + + if update: + return self._viz.update() + else: + return False + + # i/o and processing, called from client thread + # usually need to synchronize with key handlers, which run in render thread + def _draw(self) -> None: + + # update combined Auto Exposure for multiple frames + if self._ae_enabled: + if self._cloud_mode == MultiLidarScanViz.CloudMode.RANGE: + cloud_keys = np.empty(0) + for idx, ls in enumerate(self._scan): + if self._sensor_enabled[idx] and ls is not None: + cloud_keys = np.concatenate( + (cloud_keys, ls.field(ChanField.RANGE).ravel())) + self._ae_range(cloud_keys.reshape((-1, 1)), update_state=True) + + palette = self._cloud_palette + self._cloud_palette = None + + for idx, ls in enumerate(self._scan): + + if ls is not None: + + self._clouds[idx].set_range(ls.field(ChanField.RANGE)) + + if self._cloud_mode == MultiLidarScanViz.CloudMode.REFLECTIVITY: + key_data = ls.field(ChanField.REFLECTIVITY).astype( + np.float32) / 255.0 + + self._clouds[idx].set_key(key_data) + self._images[idx].set_image( + client.destagger(self._metas[idx], key_data)) + elif self._cloud_mode == 
MultiLidarScanViz.CloudMode.SIGNAL: + key_data = ls.field(ChanField.SIGNAL).astype(np.float32) + if self._ae_enabled: + self._ae_signal[idx](key_data) + else: + key_data = key_data / np.max(key_data) + + self._clouds[idx].set_key(key_data) + self._images[idx].set_image( + client.destagger(self._metas[idx], key_data)) + elif self._cloud_mode == MultiLidarScanViz.CloudMode.NEAR_IR: + key_data = ls.field(ChanField.NEAR_IR).astype(np.float32) + self._buc_nearir[idx](key_data) + if self._ae_enabled: + self._ae_nearir[idx](key_data) + else: + key_data = key_data / np.max(key_data) + + self._clouds[idx].set_key(key_data) + self._images[idx].set_image( + client.destagger(self._metas[idx], key_data)) + elif self._cloud_mode == MultiLidarScanViz.CloudMode.RANGE: + key_data = ls.field(ChanField.RANGE).astype(np.float32) + if self._ae_enabled: + self._ae_range(key_data, update_state=False) + else: + key_data = key_data / np.max(key_data) + + self._clouds[idx].set_key(key_data) + self._images[idx].set_image( + client.destagger(self._metas[idx], key_data)) + elif self._cloud_mode == MultiLidarScanViz.CloudMode.RGB: + r = ls.field(ChanField.CUSTOM0) + g = ls.field(ChanField.CUSTOM1) + b = ls.field(ChanField.CUSTOM2) + + normalizer = 255 + + # for types other than uint8 for RED, GREEN, BLUE channels + # we try to check are there really value bigger than 255 + if (r.dtype != np.uint8 or g.dtype != np.uint8 + or b.dtype != np.uint8): + max_rgb = np.max((np.max(r), np.max(g), np.max(b))) + if max_rgb > 255: + normalizer = 65535 + + r = (r / normalizer).clip(0, 1.0).astype(np.float32) + g = (g / normalizer).clip(0, 1.0).astype(np.float32) + b = (b / normalizer).clip(0, 1.0).astype(np.float32) + + rgb_data = np.dstack((r, g, b)) + + self._clouds[idx].set_key(rgb_data) + self._images[idx].set_image( + client.destagger(self._metas[idx], rgb_data)) + + else: + key_zeros = np.zeros( + (self._metas[idx].format.pixels_per_column, + self._metas[idx].format.columns_per_frame, 3)) + self._clouds[idx].set_key(key_zeros) + self._images[idx].set_image(key_zeros) + + if palette is not None: + self._clouds[idx].set_palette(palette) + + if self._cloud_mode == MultiLidarScanViz.CloudMode.REFLECTIVITY: + self._images[idx].set_palette(calref_palette) + else: + self._images[idx].clear_palette() + + self._update_multi_viz_osd() + self._draw_update_scan_poses() + + def _update_multi_viz_osd(self): + if self._osd_enabled: + if self._scan is None: + # TODO: show something in OSD when there is no scan? Well, we + # shouldn't be in this state ... 
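The RGB branch above picks its normalizer from the data rather than from the dtype alone: non-uint8 CUSTOM0..2 channels are treated as 16-bit only when a value above 255 is actually present. A standalone NumPy sketch of that heuristic (the helper name is illustrative, not part of the viz code):

import numpy as np

def normalize_rgb_key(r: np.ndarray, g: np.ndarray, b: np.ndarray) -> np.ndarray:
    """Scale three color channels into [0, 1] float32 for use as a cloud key."""
    normalizer = 255
    # for dtypes other than uint8, check whether any value actually exceeds
    # 255 and assume a 16-bit value range in that case
    if r.dtype != np.uint8 or g.dtype != np.uint8 or b.dtype != np.uint8:
        if np.max((np.max(r), np.max(g), np.max(b))) > 255:
            normalizer = 65535
    r = (r / normalizer).clip(0, 1.0).astype(np.float32)
    g = (g / normalizer).clip(0, 1.0).astype(np.float32)
    b = (b / normalizer).clip(0, 1.0).astype(np.float32)
    return np.dstack((r, g, b))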
+ return + + frame_ts = min([ + client.first_valid_packet_ts(s) for s in self._scan + if s is not None + ]) + + sensors_str = " ".join([ + f"{num}{' ' if not enabled else '-' if scan is None else '*'}" + for num, (enabled, scan) in enumerate( + zip(self._sensor_enabled, self._scan), start=1) + ]) + source_str = "" + if self._source_name: + source_str += "\nsource: " + os.path.basename( + self._source_name.rstrip(os.sep)).replace("_", "..") + + if (self._ae_enabled and self._cloud_mode not in [ + MultiLidarScanViz.CloudMode.REFLECTIVITY, + MultiLidarScanViz.CloudMode.RGB + ]): + ae_str = '(AE)' + else: + ae_str = '' + + # extension point for the OSD text, inserts before the "axes" line + osd_str_extra = self._osd_text_extra() + if osd_str_extra: + osd_str_extra += "\n" + + self._osd.set_text(f"{osd_str_extra}frame ts: {frame_ts}\n" + f"sensors: {sensors_str}\n" + f"cloud mode: {self._cloud_mode.name}" + f" {ae_str}" + f"{source_str if source_str else ''}") + else: + self._osd.set_text("") + + def _draw_update_scan_poses(self) -> None: + """Apply poses from the Scans to the scene""" + + # handle Axis and Camera poses + if self._scan_poses_enabled: + + # scan with the minimal timestamp determines the + # center of the system (by it's scan pose) + min_scan_ts_idx = min( + [i for i, s in enumerate(self._scan) if s is not None], + key=lambda i: client.first_valid_packet_ts(self._scan[i])) + + pose = client.first_valid_column_pose(self._scan[min_scan_ts_idx]) + + self._viz.camera.set_target(np.linalg.inv(pose)) + self._scan_axis_origin.pose = pose + + # update all sensor axis positions + for axis, m in zip(self._scan_axis, self._metas): + axis.pose = pose @ m.extrinsic + + else: + # without poses camera always points to the origin + self._viz.camera.set_target(np.eye(4)) + + self._scan_axis_origin.pose = np.eye(4) + for axis, m in zip(self._scan_axis, self._metas): + axis.pose = m.extrinsic + + # handle Cloud poses + for idx, ls in enumerate(self._scan): + if ls is not None: + if self._scan_poses_enabled: + self._clouds[idx].set_column_poses(ls.pose) + else: + self._clouds[idx].set_column_poses( + self._column_poses_identity[idx]) + + def toggle_scan_poses(self) -> None: + """Toggle the scan poses use""" + with self._lock: + if self._scan_poses_enabled: + self._scan_poses_enabled = False + print("MuliScanViz: Key SHIFT-T: Scan Poses: OFF") + else: + self._scan_poses_enabled = True + print("MultiScanViz: Key SHIFT-T: Scan Poses: ON") + return True + + def update_ring_size(self, amount: int) -> None: + """Change distance ring size.""" + with self._lock: + self._ring_size = min(3, max(-2, self._ring_size + amount)) + self._viz.target_display.set_ring_size(self._ring_size) + return False + + def cicle_ring_line_width(self) -> None: + """Change rings line width.""" + with self._lock: + self._ring_line_width = max(1, (self._ring_line_width + 1) % 10) + self._viz.target_display.set_ring_line_width(self._ring_line_width) + return False + + def toggle_osd(self, state: Optional[bool] = None) -> None: + """Show or hide the on-screen display.""" + with self._lock: + self._osd_enabled = not self._osd_enabled if state is None else state + print("Toggle OSD to: ", self._osd_enabled) + self.update_image_size(0) + return True + + def toggle_scan_axis(self) -> None: + """Toggle the helper axis of a scan ON/OFF""" + with self._lock: + if self._scan_axis_enabled: + print("MultiLidarScanViz: Key H: Scan Axis Helper - OFF") + self._scan_axis_enabled = False + self._scan_axis_origin.disable() + for axis in 
self._scan_axis: + axis.disable() + else: + print("MultiLidarScanViz: Key H: Scan Axis Helper - ON") + self._scan_axis_enabled = True + self._scan_axis_origin.enable() + for axis, enabled in zip(self._scan_axis, self._sensor_enabled): + if enabled: + axis.enable() + return False + + def toggle_sensor(self, i: int) -> None: + """Toggle whether the i'th sensor data is displayed.""" + if i >= len(self._metas): + return + with self._lock: + if self._sensor_enabled[i]: + self._sensor_enabled[i] = False + self._viz.remove(self._clouds[i]) + self._viz.remove(self._images[i]) + self._scan_axis[i].disable() + else: + self._sensor_enabled[i] = True + self._viz.add(self._clouds[i]) + self._viz.add(self._images[i]) + if self._scan_axis_enabled: + self._scan_axis[i].enable() + self.update_image_size(0) + self._update_multi_viz_osd() + return False + + def toggle_auto_exposure(self) -> None: + """Toggle the AutoExposure use for 2d images ans clouds""" + with self._lock: + if self._ae_enabled: + print("MultiLidarScanViz: Key SHIFT-A: AutoExposure - OFF") + self._ae_enabled = False + else: + print("MultiLidarScanViz: Key SHIFT-A: AutoExposure - ON") + self._ae_enabled = True + return True + + def update_point_size(self, amount: int) -> None: + """Change the point size of all clouds.""" + with self._lock: + self._cloud_pt_size = min(10.0, + max(1.0, self._cloud_pt_size + amount)) + for cloud in self._clouds: + cloud.set_point_size(self._cloud_pt_size) + return False + + @staticmethod + def _next_cloud_mode( + mode: 'MultiLidarScanViz.CloudMode') -> 'MultiLidarScanViz.CloudMode': + return MultiLidarScanViz.CloudMode( + (mode.value + 1) % len(MultiLidarScanViz.CloudMode.__members__)) + + def _cloud_mode_available(self, mode: 'MultiLidarScanViz.CloudMode') -> bool: + """Checks whether data is present for the correspoding Cloud mode in all scans""" + if self._scan is None: + return True + if mode in self._mode_to_channels: + mode_chans = self._mode_to_channels[mode] + for ls in self._scan: + if ls is not None and not all( + [ch in ls.fields for ch in mode_chans]): + return False + return True + return False + + def _next_available_cloud_mode( + self, mode: 'MultiLidarScanViz.CloudMode' + ) -> Optional['MultiLidarScanViz.CloudMode']: + """Switch cloud mode to the next available that can be drawn""" + next_mode = self._next_cloud_mode(mode) + cnt = 0 + while (cnt < len(MultiLidarScanViz.CloudMode.__members__) + and not self._cloud_mode_available(next_mode)): + next_mode = self._next_cloud_mode(next_mode) + cnt += 1 + if cnt < len(MultiLidarScanViz.CloudMode.__members__): + return next_mode + else: + return None + + def update_cloud_mode(self, + mode: 'Optional[MultiLidarScanViz.CloudMode]' = None + ) -> None: + with self._lock: + # cycle between cloud mode enum values + if mode is None: + next_mode = self._next_available_cloud_mode(self._cloud_mode) + if next_mode is not None: + self._cloud_mode = next_mode + else: + print("ERROR: no cloud mode has data in frames") + else: + if self._cloud_mode_available(mode): + self._cloud_mode = mode + + if self._cloud_mode == MultiLidarScanViz.CloudMode.REFLECTIVITY: + self._cloud_palette = calref_palette + else: + self._cloud_palette = spezia_palette + + print("Cloud Mode: ", self._cloud_mode) + return True + + def update_images_layout( + self, + layout: 'Optional[MultiLidarScanViz.ImagesLayout]' = None) -> None: + with self._lock: + # cycle between images layout enum values + if layout is None: + self._images_layout = MultiLidarScanViz.ImagesLayout( + 
(self._images_layout.value + 1) % + len(MultiLidarScanViz.ImagesLayout.__members__)) + else: + self._images_layout = layout + print("Images Layout: ", self._images_layout) + self.update_image_size(0) + return False diff --git a/python/src/ouster/sdk/viz/py.typed b/python/src/ouster/sdk/viz/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/python/src/ouster/viz/scans_accum.py b/python/src/ouster/sdk/viz/scans_accum.py similarity index 95% rename from python/src/ouster/viz/scans_accum.py rename to python/src/ouster/sdk/viz/scans_accum.py index b7fdf487..cb258c6c 100644 --- a/python/src/ouster/viz/scans_accum.py +++ b/python/src/ouster/sdk/viz/scans_accum.py @@ -18,12 +18,14 @@ import logging from ._viz import (PointViz, WindowCtx, Label, Cloud, grey_palette, - spezia_palette, magma_palette, viridis_palette, + grey_cal_ref_palette, spezia_palette, spezia_cal_ref_palette, + magma_palette, magma_cal_ref_palette, + viridis_palette, viridis_cal_ref_palette, calref_palette) from .util import push_point_viz_handler -import ouster.client as client -from ouster.client import ChanField -import ouster.sdk.pose_util as pu +import ouster.sdk.client as client +from ouster.sdk.client import ChanField +import ouster.sdk.util.pose_util as pu from .view_mode import (CloudMode, ReflMode, SimpleMode, is_norm_reflectivity_mode, CloudPaletteItem) @@ -184,8 +186,9 @@ def __init__(self, self._cloud_mode_ind = 0 self._cloud_mode_ind_prev = (self._cloud_mode_ind + 1) % len(self._cloud_modes) - # cloud color palettes to use - self._cloud_palettes: List[CloudPaletteItem] = [ + # Note these 2 palette arrays must always be the same length + self._cloud_palettes: List[CloudPaletteItem] + self._cloud_palettes = [ CloudPaletteItem("Ouster Colors", spezia_palette), CloudPaletteItem("Greyscale", grey_palette), CloudPaletteItem("Viridis", viridis_palette), @@ -193,23 +196,18 @@ def __init__(self, CloudPaletteItem("Cal. Ref", calref_palette), ] - # Cal. Ref. is separate because we explicitly set it on REFLECTIVITY - # color mode and restrict rotations of palettes when it's reflectivity - self._cloud_calref_palette = CloudPaletteItem("Cal. Ref", calref_palette) + self._refl_cloud_palettes: List[CloudPaletteItem] + self._refl_cloud_palettes = [ + CloudPaletteItem("Cal. Ref. Ouster Colors", spezia_cal_ref_palette), + CloudPaletteItem("Cal. Ref. Greyscale", grey_cal_ref_palette), + CloudPaletteItem("Cal. Ref. Viridis", viridis_cal_ref_palette), + CloudPaletteItem("Cal. Ref. Magma", magma_cal_ref_palette), + CloudPaletteItem("Cal. Ref", calref_palette), + ] # init cloud palette toggle self._cloud_palette_ind = 0 - self._cloud_palette_ind_prev = self._cloud_palette_ind - - # whether it's currently "snapped" to the Cal.Ref. palette - self._cloud_palette_refl_mode = False - - # trigger the palette check, so it initializes the _cloud_palette_refl_mode - # variable correctly for situations when OSD text is drawn before the - # call to draw() - # TODO[pb]: Make it less convoluted with palettes toggling and Cal.Ref. 
- # snapping - self._update_cloud_palette() + self._cloud_palette_prev = self._refl_cloud_palettes[self._cloud_palette_ind] # initialize MAP structs map_init_points_num = MAP_INIT_POINTS_NUM if self._map_enabled else 0 @@ -483,8 +481,6 @@ def _draw_map(self) -> None: logger.debug("ACTIVE CLOUD MODE (MAP): %s", self.active_cloud_mode.name) update_palette = self._update_cloud_palette() - if update_palette is not None: - self._cloud_map.set_palette(update_palette.palette) @no_type_check def _draw_accum(self) -> None: @@ -567,9 +563,6 @@ def _draw_accum(self) -> None: if self._cloud_mode_ind_prev != self._active_cloud_mode_ind: acloud.set_key(sr.cloud_mode_keys[mode_name]) - if update_palette is not None: - acloud.set_palette(update_palette.palette) - @no_type_check def toggle_mode_accum(self, state: Optional[bool] = None) -> bool: """Toggle ACCUM view""" @@ -617,7 +610,6 @@ def cycle_cloud_mode(self, *, direction: int = 1) -> bool: """Change the coloring mode of the point cloud for MAP/ACCUM clouds""" with self._lock: self._cloud_mode_ind = (self._cloud_mode_ind + direction) - self._cloud_palette_refl_mode = False # update internal states immediately so the OSD text of scans accum # is switched already to a good state (needed for LidarScanViz osd # update) @@ -630,7 +622,6 @@ def cycle_cloud_palette(self, *, direction: int = 1) -> bool: npalettes = len(self._cloud_palettes) self._cloud_palette_ind = (self._cloud_palette_ind + direction + npalettes) % npalettes - self._cloud_palette_refl_mode = False # update internal states immediately so the OSD text of scans accum # is switched already to a good state (needed for LidarScanViz osd # update) @@ -748,6 +739,9 @@ def update(self, # refine available modes based on the current scan ls = self._scan[self._sensor_idx] + if ls is None: + return + self._available_modes = list( filter(lambda midx: self._cloud_modes[midx].enabled(ls), self._available_modes)) @@ -988,9 +982,7 @@ def map_visible(self) -> bool: @property def active_cloud_palette(self) -> CloudPaletteItem: """Cloud palette used for ACCUM/MAP clouds""" - return (self._cloud_palettes[self._active_cloud_palette_ind] - if not self._cloud_palette_refl_mode else - self._cloud_calref_palette) + return self._cloud_palette_prev @property def _active_cloud_palette_ind(self) -> int: @@ -1007,18 +999,25 @@ def _update_cloud_palette(self) -> Optional[CloudPaletteItem]: mode and coloring options. """ refl_mode = is_norm_reflectivity_mode(self.active_cloud_mode) - if self._cloud_mode_ind_prev != self._active_cloud_mode_ind: - refl_mode_prev = is_norm_reflectivity_mode( - self._cloud_modes[self._cloud_mode_ind_prev]) - - if refl_mode_prev and not refl_mode: - return self.active_cloud_palette - elif not refl_mode_prev and refl_mode: - # snap to the Cal.Ref. 
palette until cloud mode or palette cycled - self._cloud_palette_refl_mode = True - return self._cloud_calref_palette - - if (self._cloud_palette_ind_prev != self._active_cloud_palette_ind): + + current_palette = None + if refl_mode: + current_palette = self._refl_cloud_palettes[self._cloud_palette_ind] + else: + current_palette = self._cloud_palettes[self._cloud_palette_ind] + + if self._cloud_palette_prev is None or self._cloud_palette_prev.name != current_palette.name: + self._cloud_palette_prev = current_palette + + # update palettes on everything + if self._clouds_accum: + for acloud, kf_idx in zip(self._clouds_accum, self._key_frames): + if acloud: + acloud.set_palette(current_palette.palette) + + if self._cloud_map: + self._cloud_map.set_palette(current_palette.palette) + return self.active_cloud_palette return None @@ -1050,7 +1049,6 @@ def _draw(self) -> None: # saving the "pen" and palette that we drew everything with self._cloud_mode_ind_prev = self._active_cloud_mode_ind - self._cloud_palette_ind_prev = self._active_cloud_palette_ind self._last_draw_dt = time.monotonic() - t diff --git a/python/src/ouster/viz/util.py b/python/src/ouster/sdk/viz/util.py similarity index 99% rename from python/src/ouster/viz/util.py rename to python/src/ouster/sdk/viz/util.py index fd0a4cb6..1f3f5003 100644 --- a/python/src/ouster/viz/util.py +++ b/python/src/ouster/sdk/viz/util.py @@ -3,7 +3,7 @@ import weakref import numpy as np -import ouster.sdk.pose_util as pu +import ouster.sdk.util.pose_util as pu from ._viz import PointViz, Cloud, Label, WindowCtx diff --git a/python/src/ouster/viz/view_mode.py b/python/src/ouster/sdk/viz/view_mode.py similarity index 98% rename from python/src/ouster/viz/view_mode.py rename to python/src/ouster/sdk/viz/view_mode.py index fa1448f1..cba16957 100644 --- a/python/src/ouster/viz/view_mode.py +++ b/python/src/ouster/sdk/viz/view_mode.py @@ -3,9 +3,9 @@ from dataclasses import dataclass import numpy as np -from ouster import client -from ..client._client import Version -from ouster.client import _utils +from ouster.sdk import client +from ouster.sdk.client._client import Version +from ouster.sdk.client import _utils from ._viz import Cloud, Image diff --git a/python/src/ouster/sdkx/__init__.py b/python/src/ouster/sdkx/__init__.py deleted file mode 100644 index d3a5e6d2..00000000 --- a/python/src/ouster/sdkx/__init__.py +++ /dev/null @@ -1,4 +0,0 @@ -"""Internal sensor tools.""" -# flake8: noqa: F401 (unused imports) -from pkgutil import extend_path -__path__ = extend_path(__path__, __name__) # Declare as a namespace package - see PEP 420 diff --git a/python/src/ouster/sdkx/mapping/README.rst b/python/src/ouster/sdkx/mapping/README.rst deleted file mode 100644 index 632c6c1b..00000000 --- a/python/src/ouster/sdkx/mapping/README.rst +++ /dev/null @@ -1,14 +0,0 @@ -====================== -Mapping Python Package -====================== - -:Maintainers: Hao Yuan -:Description: Ouster mapping python package -:Project-type: lib/Python - - -Summary -======= -Ouster mapping python package. 
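The renamed modules above follow the pattern applied throughout this change: top-level ouster.* packages now live under ouster.sdk.*. A minimal sketch of how downstream imports migrate, limited to module paths that appear in these hunks:

# old paths, shown for comparison only:
#   import ouster.client as client
#   from ouster.client import ChanField
#   import ouster.osf as osf
#   import ouster.sdk.pose_util as pu

# new paths after the move under ouster.sdk:
import ouster.sdk.client as client
from ouster.sdk.client import ChanField
import ouster.sdk.osf as osf
import ouster.sdk.util.pose_util as pu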
- - diff --git a/python/src/ouster/sdkx/mapping/__init__.py b/python/src/ouster/sdkx/mapping/__init__.py deleted file mode 100644 index a80e032f..00000000 --- a/python/src/ouster/sdkx/mapping/__init__.py +++ /dev/null @@ -1,2 +0,0 @@ -raise RuntimeError("The ouster.sdkx.mapping package has been replaced by the ouster.mapping " -"package provided by the ouster-sdk[mapping] optional package.") diff --git a/python/src/ouster/sdkx/mapping/kiss_backend.py b/python/src/ouster/sdkx/mapping/kiss_backend.py deleted file mode 100644 index a80e032f..00000000 --- a/python/src/ouster/sdkx/mapping/kiss_backend.py +++ /dev/null @@ -1,2 +0,0 @@ -raise RuntimeError("The ouster.sdkx.mapping package has been replaced by the ouster.mapping " -"package provided by the ouster-sdk[mapping] optional package.") diff --git a/python/src/ouster/sdkx/mapping/mapping.py b/python/src/ouster/sdkx/mapping/mapping.py deleted file mode 100644 index 0f6ba942..00000000 --- a/python/src/ouster/sdkx/mapping/mapping.py +++ /dev/null @@ -1,407 +0,0 @@ -# type: ignore -import sys -import click -import ipaddress -from typing import Optional -from ouster.cli.core import cli -from ouster.cli.core.util import click_ro_file -import ouster.sdk.pose_util as pu -from ouster.sdk.util import resolve_metadata -from ouster import client -from pathlib import Path -import ouster.osf as osf -import ouster.sdkx.mapping.util as util -from ouster.sdkx.parsing import default_scan_fields -from datetime import datetime -import time - - -@cli.group(name="mapping") -def mapping_group() -> None: - """Mapping tools.""" - pass - - -class SLAMOSFWriter: - - def __init__(self, source: str, output_path: str, chunk_size: int, lidar_port: int, imu_port: int, meta: str): - source_ext = Path(source).suffix - if source_ext == ".pcap": - if meta: - meta_data = meta - else: - meta_data = resolve_metadata(source) - if not meta_data: - raise Exception("File not found, please specify a metadata file with `-m`") - with open(Path(meta_data)) as meta_file: - meta_json = meta_file.read() - lidar_sensor_meta = osf.LidarSensor(meta_json) - elif source_ext == ".local" or ipaddress.ip_address(source): - print(f"Connecting to {source}") - sensor_source = client.Sensor(source, lidar_port, imu_port) - lidar_sensor_meta = osf.LidarSensor(sensor_source._fetched_meta) - else: - raise ValueError("Off") - - self.writer = osf.Writer(output_path, "SLAM_Output_OSF", chunk_size) - # TODO sensor_id may changed depends on how will we handle multi live sensor - sensor_osf_id = dict() - # Fix the sensor id logic. 
Later need to extend to multisensor - single_sensor_id = 0 - sensor_osf_id[single_sensor_id] = self.writer.addMetadata(lidar_sensor_meta) - - self.lidar_stream = osf.LidarScanStream(self.writer, sensor_osf_id[single_sensor_id]) - - def write_scan(self, scan_ts, scan): - self.lidar_stream.save(scan_ts, scan) - - def close(self): - self.writer.close() - - -def slam_scans_generator(scan_source, slam, osf_writer): - for scan in scan_source: - scan_slam = slam.update(scan) - scan_ts = client.first_valid_packet_ts(scan_slam) - osf_writer.write_scan(scan_ts, scan_slam) - yield scan_slam - - osf_writer.close() - - -@mapping_group.command(name='slam') -@click.argument('source', required=True, type=str) -@click.argument('viz', required=False, default="", type=str) -@click.option('-m', '--meta', required=False, default=None, type=click_ro_file, - help="Metadata file for pcap input") -@click.option('--slam_name', default='kiss_slam', help="Slam name") -@click.option('-l', '--lidar_port', default=7502, help="Lidar port") -@click.option('-i', '--imu_port', default=7503, help="IMU port") -@click.option('-o', '--output', required=False, help="OSF output filename") -@click.option("--accum-num", - default=0, - help="Integer number of scans to accumulate") -@click.option("--accum-every", - default=None, - type=float, - help="Accumulate every Nth scan") -@click.option("--accum-every-m", - default=None, - type=float, - help="Accumulate scan every M meters traveled") -@click.option("--accum-map", - is_flag=True, - help="Enable the overall map accumulation mode") -@click.option("--accum-map-ratio", - default=0.001, - help="Ratio of random points of every scan to add to an overall map") -def run_slam( - source: str, - viz: str, - meta: Optional[str], - slam_name: str, - lidar_port: int, - imu_port: int, - output: Optional[str], - accum_num: int, - accum_every: Optional[int], - accum_every_m: Optional[float], - accum_map: bool, - accum_map_ratio: float) -> None: - """ - Run SLAM algorithm with an optional visualizer - - Run with a sensor or a pcap file to produce an OSF containing the lidar data and SLAM poses. - To turn on visualizer, append 'viz' or 'visualizer' to the command, case insensitive. - """ - run_slam_impl(source, viz, meta, slam_name, lidar_port, imu_port, output, - accum_num, accum_every, accum_every_m, accum_map, - accum_map_ratio) - - -def run_slam_impl(source: str, - viz: str = None, - meta: str = None, - slam_name: str = "kiss_slam", - lidar_port: int = 7502, - imu_port: int = 7503, - output: str = None, - accum_num: int = 0, - accum_every: Optional[int] = None, - accum_every_m: Optional[float] = None, - accum_map: bool = False, - accum_map_ratio: float = 0.001) -> None: - - data_source = util.Source(source, meta = meta) - - if not output: - date_time = datetime.now().strftime("%Y%m%d_%H%M%S") - metadata = data_source.metadata - output = f"{metadata.prod_line}_{metadata.sn}_{metadata.fw_rev}_{metadata.mode}_{date_time}_slam_output.osf" - - if slam_name == "kiss_slam": - try: - from ouster.sdkx.mapping.slam import KissBackend - except ImportError as e: - raise click.ClickException("kiss_icp is not installed. Please run pip install kiss-icp. 
Error: " + str(e)) - slam = KissBackend(info=data_source.metadata) - else: - raise ValueError("Only support KISS-ICP SLAM for now") - - chunk_size = 0 - osf_writer = SLAMOSFWriter(source, output, chunk_size, lidar_port, imu_port, meta) - - print(f"Running {slam_name} SLAM and start writing LidarScan and Traj into {output}\nhit ctrl-c to exit") - - start_time = time.time() - slam_scan_gen = slam_scans_generator(data_source, slam, osf_writer) - - if viz and viz.lower() in ['viz', 'visualizer']: - try: - from ouster.viz import SimpleViz, scans_accum_for_cli - except ImportError as e: - raise click.ClickException("Error: " + str(e)) - scans_accum = scans_accum_for_cli(data_source.metadata, - accum_num=accum_num, - accum_every=accum_every, - accum_every_m=accum_every_m, - accum_map=accum_map, - accum_map_ratio=accum_map_ratio) - simple_viz = SimpleViz(data_source.metadata, scans_accum=scans_accum) - simple_viz.run(slam_scan_gen) - else: - for _ in slam_scan_gen: - pass - - end_time = time.time() - elapsed_time = end_time - start_time - print(f"Elapsed time: {elapsed_time} seconds") - - -@mapping_group.command(name='convert') -@click.argument('input_file', required=True, type=click_ro_file) -@click.argument('output_file', required=True) -@click.option('-d', '--min_dist', default=2.0, help="Min dist (m) for points to " - "save. Default value is 2m") -@click.option('-s', '--voxel_size', default=0.1, help="Voxel map size for downsampling." - "This parameter is the same as the open3D voxel size. Default value is 0.1. " - "The bigger the value, the fewer points it outputs" - " http://www.open3d.org/docs/0.6.0/python_api/open3d.geometry.voxel_down_sample.html") -@click.option('-f', - '--field', - required=False, - type=click.Choice(['SIGNAL', - 'NEAR_IR', - 'REFLECTIVITY'], - case_sensitive=False), - default="REFLECTIVITY", - help="Chanfield for output file key value. Choose between SIGNAL, NEAR_IR, " - "REFLECTIVITY. Default field is REFLECTIVITY") -@click.option('--print_process', required=False, type=bool, default=True, help="Default is On") -@click.option('--verbose_print', required=False, type=bool, default=False, - help="Print point cloud status much frequently. Default is Off") -def point_cloud_convert(input_file: str, output_file: str, min_dist: float, - voxel_size: float, field: str, - print_process: Optional[bool], verbose_print: Optional[bool]) -> None: - """ - Save point cloud from an OSF file into specific formats - - Output file format depends on output filename extension. The valid output files - extensions are .las, .ply and .pcd. Default output format is .ply. For large point - cloud, the output will be split into multiple files and each file is around 1G. - Currently this tool only supports single lidar OSF files. 
- """ - try: - import open3d as o3d - import numpy as np - import laspy - except ImportError: - raise click.ClickException("Please verify that open3d, laspy libs are installed") - - # If not specify output filename and format, use input filename and ply format - - output_file_path = Path(output_file) - file_wo_ext = str(output_file_path.stem) - outfile_ext = str(output_file_path.suffix) - if outfile_ext not in [".las", ".ply", ".pcd"]: - sys.exit("Error: output file extension only support .las, .ply, and " - f".pcd, but input file extension is {outfile_ext}") - - points_for_downsample = np.empty(shape = [0, 3]) - points_ds_keys = np.empty(shape = [0, 1]) - - pcd = o3d.t.geometry.PointCloud() - points_total = o3d.t.geometry.PointCloud() - - # hard-coded parameters and counters # - # affect the running time. smaller value mean longer running time. Too large - # may lead to crash - down_sample_steps = 100 - # affect per output file size. 30000000 makes output file size ~1G - max_pnt_per_file = 30000000 - file_numb = 1 - # if enabled, print process every 5% - parts_cnt = 20 - # will be updated using the total msgs - delta_cnt = 0 - - # variables for point cloud status printout - points_sum = 0 - points_saved = 0 - points_zero = 0 - points_near_removed = 0 - points_down_removed = 0 - - infile_ext = str(Path(input_file).suffix) - if infile_ext not in [".osf"]: - sys.exit("Exit! input file extension only support .osf files " - f"but input file extension is {infile_ext}") - - reader = osf.Reader(input_file) - - scans = osf.Scans(input_file) - sensor_meta = reader.meta_store.get(osf.LidarSensor) - info = sensor_meta.info - sensor_id = sensor_meta.id - - valid_fields = default_scan_fields(info.format.udp_profile_lidar, flags=False) - channel_field = client.ChanField.from_string(field) - - if channel_field not in valid_fields: - valid_fields_str = ", ".join(map(str, list(valid_fields.keys()))) - - sys.exit(f"Exit! field {field} is not available in the low bandwidth mode\n" - f"use -f and choose a valid field from {valid_fields_str}") - - scan_streams = reader.meta_store.find(osf.LidarScanStream) - sensor_stream_id = next((mid for mid, m in scan_streams.items() - if m.sensor_meta_id == sensor_id), 0) - - streaming_info = reader.meta_store.get(osf.StreamingInfo) - if streaming_info: - for stream_id, stream_stat in streaming_info.stream_stats: - if stream_id == sensor_stream_id: - total_msgs = stream_stat.message_count - break - - if total_msgs == 0: - sys.exit("Exit! 
No lidar scan msg found.") - - delta_cnt = total_msgs // parts_cnt - xyzlut = client.XYZLut(info, use_extrinsics=True) - - def down_sample_points(points_for_downsample, points_ds_keys): - nonlocal points_total, pcd, points_down_removed, points_saved - pcd.point.positions = o3d.core.Tensor(points_for_downsample) - # normalized the key - points_ds_keys *= 1 / points_ds_keys.max() - pcd.point[field] = o3d.core.Tensor(points_ds_keys) - down_points = pcd.voxel_down_sample(voxel_size) - points_down_removed += (len(points_for_downsample) - down_points.point.positions.shape[0]) - points_saved += down_points.point.positions.shape[0] - if points_total.is_empty(): - points_total = down_points - else: - points_total = points_total.append(down_points) - pcd.clear() - - def save_file(file_wo_ext: str, outfile_ext: str, points_total): - print(f"Output file: {file_wo_ext + outfile_ext}") - - pc_status_print() - if outfile_ext == ".ply": - # PLY intensity range is 0-1 - o3d.t.io.write_point_cloud((file_wo_ext + outfile_ext), points_total) - elif outfile_ext == ".las": - pos_array = points_total.point.positions[:, :].numpy() - # LAS intensity range is 0-255 - key_array = points_total.point[field][:].numpy() * 255 - LAS_file = laspy.create() - LAS_file.x = pos_array[:, 0] - LAS_file.y = pos_array[:, 1] - LAS_file.z = pos_array[:, 2] - # LAS file only has intensity but we can use it for other field value - LAS_file.intensity = key_array[:, 0] - LAS_file.write(file_wo_ext + outfile_ext) - elif outfile_ext == ".pcd": - pos_array = points_total.point.positions[:, :].numpy() - pcd = o3d.geometry.PointCloud() - pcd.points = o3d.utility.Vector3dVector(pos_array) - o3d.io.write_point_cloud((file_wo_ext + outfile_ext), pcd) - - def pc_status_print(): - nonlocal points_sum, points_near_removed, points_down_removed, points_saved, points_zero - near_minus_zero = points_near_removed - points_zero - near_removed_pernt = (near_minus_zero / points_sum) * 100 - down_removed_pernt = (points_down_removed / points_sum) * 100 - zero_pernt = (points_zero / points_sum) * 100 - save_pernt = (points_saved / points_sum) * 100 - print( - f"{points_sum} points accumulated during this period,\n{near_minus_zero} " - f"near points are removed [{near_removed_pernt:.2f} %],\n{points_down_removed} " - f"down sampling points are removed [{down_removed_pernt:.2f} %],\n{points_zero} " - f"zero range points are removed [{zero_pernt:.2f} %],\n{points_saved} points " - f"are saved [{save_pernt:.2f} %].") - points_sum = 0 - points_zero = 0 - points_saved = 0 - points_near_removed = 0 - points_down_removed = 0 - - empty_pose = True - for scan_idx, scan in enumerate(scans): - # Pose attribute is per col global pose so we use identity for scan pose - column_poses = scan.pose - - if (empty_pose and column_poses.size > 0 - and not np.array_equal(column_poses[client.first_valid_column(scan)], np.eye(4))): - empty_pose = False - - points = xyzlut(scan) - keys = scan.field(channel_field) - if delta_cnt != 0 and (scan_idx + 1) % delta_cnt == 0 and (100 // parts_cnt) * \ - ((scan_idx + 1) // delta_cnt) <= 100: - print(f"{(100//parts_cnt) * ((scan_idx+1)//delta_cnt)} % of data processed") - - # to remove near points - row_index = scan.field(client.ChanField.RANGE) > (min_dist * 1000) - zero_row_index = scan.field(client.ChanField.RANGE) == 0 - dewarped_points = pu.dewarp(points, column_poses=column_poses) - filtered_points = dewarped_points[row_index] - filtered_keys = keys[row_index] - - curr_scan_points = row_index.shape[0] * row_index.shape[1] - points_sum 
+= curr_scan_points - points_near_removed += curr_scan_points - np.count_nonzero(row_index) - points_zero += np.count_nonzero(zero_row_index) - - shaped_keys = filtered_keys.reshape(filtered_keys.shape[0], 1) - points_for_downsample = np.append(points_for_downsample, filtered_points, - axis = 0) - points_ds_keys = np.append(points_ds_keys, shaped_keys, axis = 0) - - # downsample the accumulated point clouds # - if scan_idx % down_sample_steps == 0: - down_sample_points(points_for_downsample, points_ds_keys) - points_for_downsample = np.empty(shape = [0, 3]) - points_ds_keys = np.empty(shape = [0, 1]) - if verbose_print: - pc_status_print() - - # output a file to prevent crash due to oversize # - if points_total.is_empty() is False and points_total.point.positions.shape[0] >= max_pnt_per_file: - save_file(file_wo_ext + str(file_numb), outfile_ext, points_total) - file_numb += 1 - points_total.clear() - - # handle the last part of point cloud or the first part of point cloud if - # the size is less than down_sample_steps - if points_ds_keys.size > 0: - down_sample_points(points_for_downsample, points_ds_keys) - - if empty_pose: - print( - "Warning: Empty lidar scan pose in the OSF file.\n" - "Suggest: Use SLAM output OSF file for conversion. By command: ouster-cli mapping slam FILE") - - save_file(file_wo_ext + str(file_numb), outfile_ext, points_total) diff --git a/python/src/ouster/sdkx/mapping/slam.py b/python/src/ouster/sdkx/mapping/slam.py deleted file mode 100644 index a80e032f..00000000 --- a/python/src/ouster/sdkx/mapping/slam.py +++ /dev/null @@ -1,2 +0,0 @@ -raise RuntimeError("The ouster.sdkx.mapping package has been replaced by the ouster.mapping " -"package provided by the ouster-sdk[mapping] optional package.") diff --git a/python/src/ouster/sdkx/mapping/slam_backend.py b/python/src/ouster/sdkx/mapping/slam_backend.py deleted file mode 100644 index a80e032f..00000000 --- a/python/src/ouster/sdkx/mapping/slam_backend.py +++ /dev/null @@ -1,2 +0,0 @@ -raise RuntimeError("The ouster.sdkx.mapping package has been replaced by the ouster.mapping " -"package provided by the ouster-sdk[mapping] optional package.") diff --git a/python/src/ouster/sdkx/mapping/stitch.py b/python/src/ouster/sdkx/mapping/stitch.py deleted file mode 100644 index a80e032f..00000000 --- a/python/src/ouster/sdkx/mapping/stitch.py +++ /dev/null @@ -1,2 +0,0 @@ -raise RuntimeError("The ouster.sdkx.mapping package has been replaced by the ouster.mapping " -"package provided by the ouster-sdk[mapping] optional package.") diff --git a/python/src/ouster/sdkx/mapping/util.py b/python/src/ouster/sdkx/mapping/util.py deleted file mode 100644 index a80e032f..00000000 --- a/python/src/ouster/sdkx/mapping/util.py +++ /dev/null @@ -1,2 +0,0 @@ -raise RuntimeError("The ouster.sdkx.mapping package has been replaced by the ouster.mapping " -"package provided by the ouster-sdk[mapping] optional package.") diff --git a/python/src/ouster/sdkx/packet_iter.py b/python/src/ouster/sdkx/packet_iter.py deleted file mode 100644 index b87dece4..00000000 --- a/python/src/ouster/sdkx/packet_iter.py +++ /dev/null @@ -1,185 +0,0 @@ -import os -import time -from datetime import datetime -from typing import Callable, Iterable, Iterator, TypeVar, Optional - -from more_itertools import consume - -from ouster.client import Packet, LidarPacket, ImuPacket, PacketSource, SensorInfo -from ouster.pcap.pcap import MTU_SIZE -import ouster.pcap._pcap as _pcap - - -T = TypeVar('T') - - -def ichunked_before(it: Iterable[T], - pred: Callable[[T], bool]) -> 
Iterator[Iterator[T]]: - """Return the given stream chunked by the predicate. - - Each sub-iterator will be fully consumed when the next chunk is - requested. No caching of unused items is performed, so client code should - evaluate sub-iterators (e.g. into lists) to avoid dropping items. - - This should behave same as more_itertools.split_before, except that chunks - aren't eagerly evaluated into lists. This makes it safe to use on streams - where it's possible that ``pred`` never evaluates to true. - """ - i = iter(it) - - # flag used by chunks to signal that the underlying iterator is exhausted - done = False - - # first item of the next chunk. See: nonlocal below - try: - t = next(i) - except StopIteration: - return - - def chunk() -> Iterator[T]: - nonlocal done, t - - yield t - for t in i: - if pred(t): - break - else: - yield t - # only if the iterator is exhausted - else: - done = True - - while not done: - c = chunk() - yield c - consume(c) - - -def ichunked_framed( - packets: Iterable[Packet], - pred: Callable[[Packet], - bool] = lambda _: True) -> Iterator[Iterator[Packet]]: - """Delimit a packets when the frame id changes and pred is true.""" - - last_f_id = -1 - - def frame_boundary(p: Packet) -> bool: - nonlocal last_f_id - if isinstance(p, LidarPacket): - f_id = p.frame_id - changed = last_f_id != -1 and f_id != last_f_id - last_f_id = f_id - return changed and pred(p) - return False - - return ichunked_before(packets, frame_boundary) - - -def n_frames(packets: Iterable[Packet], n: int) -> Iterator[Packet]: - for i, frame in enumerate(ichunked_framed(packets)): - if i < n: - yield from frame - else: - break - - -class RecordingPacketSource: - # TODO: deduplicate this & pcap.record - def __init__(self, - source: PacketSource, - output_directory: str, - *, - prefix: str = "", - n_seconds: float = 0.0, - n_frames: Optional[int], - chunk_size: int = 0, - src_ip: str = "127.0.0.1", - dst_ip: str = "127.0.0.1", - lidar_port: int = 7502, - imu_port: int = 7503, - use_sll_encapsulation: bool = False): - self.source = source - self.output_directory = output_directory - self.prefix = prefix - self.n_seconds = n_seconds - self.n_frames = n_frames - self.chunk_size = chunk_size - self.src_ip = src_ip - self.dst_ip = dst_ip - self.lidar_port = lidar_port - self.imu_port = imu_port - self.use_sll_encapsulation = use_sll_encapsulation - - @property - def metadata(self) -> SensorInfo: - """Return metadata from the underlying PacketSource.""" - return self.source.metadata - - def close(self) -> None: - """Close the underlying PacketSource.""" - self.source.close() - - def __iter__(self) -> Iterator[Packet]: - has_timestamp = None - error = False - n = 0 - file_timestamp = datetime.now().strftime("%Y%m%d_%H%M%S") - metadata = self.source.metadata - base_name = f"{self.prefix}{metadata.prod_line}_{metadata.fw_rev}_{metadata.mode}_{file_timestamp}" - - last_f_id = -1 - - def frame_boundary(p: Packet) -> bool: - nonlocal last_f_id - if isinstance(p, LidarPacket): - f_id = p.frame_id - changed = last_f_id != -1 and f_id != last_f_id - last_f_id = f_id - return changed - return False - - try: - start_time = time.time() - chunk = 0 - num_frames = 0 - pcap_path = os.path.join(self.output_directory, base_name) + f"-{chunk:03}.pcap" - handle = _pcap.record_initialize(pcap_path, MTU_SIZE, - self.use_sll_encapsulation) - for packet in self.source: - if isinstance(packet, LidarPacket): - src_port = self.lidar_port - dst_port = self.lidar_port - elif isinstance(packet, ImuPacket): - src_port = 
self.imu_port - dst_port = self.imu_port - else: - raise ValueError("Unexpected packet type") - - if has_timestamp is None: - has_timestamp = (packet.capture_timestamp is not None) - elif has_timestamp != (packet.capture_timestamp is not None): - raise ValueError("Mixing timestamped/untimestamped packets") - - ts = packet.capture_timestamp or time.time() - _pcap.record_packet(handle, self.src_ip, self.dst_ip, src_port, dst_port, packet._data, ts) - if frame_boundary(packet): - num_frames += 1 - if self.chunk_size and os.path.getsize(pcap_path) > self.chunk_size * 2**20: - # file size exceeds chunk size; create a new chunk - chunk += 1 - pcap_path = os.path.join(self.output_directory, base_name) + f"-{chunk:03}.pcap" - _pcap.record_uninitialize(handle) - handle = _pcap.record_initialize(pcap_path, MTU_SIZE, - self.use_sll_encapsulation) - if (self.n_frames and num_frames > self.n_frames) or \ - (self.n_seconds and time.time() - start_time > self.n_seconds): - break - n += 1 - yield packet - except Exception: - error = True - raise - finally: - _pcap.record_uninitialize(handle) - if error and os.path.exists(pcap_path) and n == 0: - os.remove(pcap_path) diff --git a/python/src/ouster/viz/__init__.py b/python/src/ouster/viz/__init__.py index 2d2d5d7a..45ccb5bb 100644 --- a/python/src/ouster/viz/__init__.py +++ b/python/src/ouster/viz/__init__.py @@ -1,36 +1,5 @@ -""" -Copyright (c) 2023, Ouster, Inc. -All rights reserved. - -Ouster Visualizer (aka PointViz and tools) -""" # flake8: noqa (unused imports) -from ._viz import PointViz -from ._viz import Cloud -from ._viz import Image -from ._viz import Cuboid -from ._viz import Label -from ._viz import WindowCtx -from ._viz import Camera -from ._viz import TargetDisplay -from ._viz import add_default_controls -from ._viz import calref_palette -from ._viz import spezia_palette -from ._viz import grey_palette -from ._viz import viridis_palette -from ._viz import magma_palette - -from .core import push_point_viz_handler -from .core import LidarScanViz -from .core import SimpleViz -from .core import scans_accum_for_cli -from .view_mode import ImageMode -from .view_mode import CloudMode -from .view_mode import ImageCloudMode -from .core import CloudPaletteItem -from .core import VizExtraMode - -from .util import AxisWithLabel - -from .scans_accum import ScansAccumulator \ No newline at end of file +print("warning: the ouster.viz module has been moved to ouster.sdk.viz, " + "please use the new path to avoid this warning.") +from ouster.sdk.viz import * diff --git a/python/tests/conftest.py b/python/tests/conftest.py index d43bd3cf..692126fa 100644 --- a/python/tests/conftest.py +++ b/python/tests/conftest.py @@ -11,16 +11,16 @@ from more_itertools import partition import pytest -from ouster import client, pcap +from ouster.sdk import client, pcap -pytest.register_assert_rewrite('ouster.client._digest') -import ouster.client._digest as digest # noqa +pytest.register_assert_rewrite('ouster.sdk.client._digest') +import ouster.sdk.client._digest as digest # noqa _has_mapping = False try: from ouster.cli.plugins import cli_mapping # type: ignore # noqa: F401 # yes... it has to be in this order. - _has_mapping = True + _has_mapping = False # NOTE: temporarily disabled due to CLI chaining -- Tim T. 
except ImportError: pass diff --git a/python/tests/osf/test_osf_basics.py b/python/tests/osf/test_osf_basics.py index 453578d1..55fcf66d 100644 --- a/python/tests/osf/test_osf_basics.py +++ b/python/tests/osf/test_osf_basics.py @@ -1,9 +1,15 @@ import pytest import numpy as np +import hashlib +import shutil +import os from more_itertools import ilen +from ouster.sdk import open_source -import ouster.osf as osf +import ouster.sdk.osf as osf +import ouster.sdk.client as client +from ouster.sdk.osf._osf import LidarScanStream @pytest.fixture @@ -11,6 +17,140 @@ def input_osf_file(test_data_dir): return test_data_dir / "osfs" / "OS-1-128_v2.3.0_1024x10_lb_n3.osf" +@pytest.fixture +def input_info(test_data_dir): + filename = test_data_dir / "pcaps" / "OS-0-128-U1_v2.3.0_1024x10.json" + with open(filename, 'r') as f: + data = f.read() + return client.SensorInfo(data) + + +def test_osf_scan_source_flags(input_osf_file): + from ouster.sdk.client import ChanField + ss = open_source(str(input_osf_file), sensor_idx=0, flags=False) + assert ss.fields.get(ChanField.FLAGS) is None + ss = open_source(str(input_osf_file), sensor_idx=0) + assert ss.fields.get(ChanField.FLAGS) is not None + + +# Test that we can save a subset of scan fields and that it errors +# if you try and save a scan missing fields in the metadata +def test_writer_quick(tmp_path, input_info): + file_name = tmp_path / "test.osf" + save_fields = {} + save_fields[client.ChanField.REFLECTIVITY] = np.uint32 + save_fields[client.ChanField.RANGE] = np.uint32 + + error_fields = {} + error_fields[client.ChanField.RANGE] = np.uint32 + with osf.Writer(str(file_name), input_info, save_fields) as writer: + scan = client.LidarScan(128, 1024) + scan.field(client.ChanField.REFLECTIVITY)[:] = 123 + scan.field(client.ChanField.RANGE)[:] = 5 + + writer.save(0, scan) + + # also try saving an scan with missing fields + scan2 = client.LidarScan(128, 1024, error_fields) + scan2.field(client.ChanField.RANGE)[:] = 6 + + with pytest.raises(ValueError): + writer.save(0, scan2) + + writer.close() + + # then open it and double check that we only got the fields we needed + res_reader = osf.Reader(str(file_name)) + + messages = [it for it in res_reader.messages()] + for msg in messages: + if msg.of(LidarScanStream): + ls = msg.decode() + if ls: + # validate that it only has the channels we added + fields = [field for field in ls.fields] + assert client.ChanField.RANGE in fields + assert client.ChanField.REFLECTIVITY in fields + assert len(fields) == 2 + + assert len(messages) == 1 + + +def writer_output_handler(writer, output_osf_file, info): + assert writer.filename() == str(output_osf_file) + assert writer.sensor_info_count() == 1 + assert info == writer.sensor_info(0) + assert info == writer.sensor_info()[0] + + scan1 = client.LidarScan(128, 1024) + assert scan1 is not None + scan1.status[:] = 0x1 + scan1.field(client.ChanField.REFLECTIVITY)[:] = 100 + + scan2 = client.LidarScan(128, 1024) + assert scan2 is not None + scan2.status[:] = 0x1 + scan2.field(client.ChanField.REFLECTIVITY)[:] = 200 + + writer.save(0, scan1) + writer.save([scan2]) + + return (scan1, scan2) + + +def writer_input_handler(scan1, scan2, output_osf_file): + assert scan1 is not None + assert scan2 is not None + assert scan1 != scan2 + + assert output_osf_file.exists() + res_reader = osf.Reader(str(output_osf_file)) + + messages = [it for it in res_reader.messages()] + assert len(messages) == 2 + + read_scan1 = messages[0].decode() + assert read_scan1 is not None + read_scan2 = 
messages[1].decode() + assert read_scan2 is not None + assert read_scan1 != read_scan2 + + assert read_scan1 == scan1 + assert read_scan2 == scan2 + count = 0 + for _ in res_reader.messages(): + count += 1 + assert count == 2 + + +def test_osf_basic_writer(tmp_path, input_info): + output_osf_file = tmp_path / "out_basic.osf" + + writer = osf.Writer(str(output_osf_file), input_info) + scan1, scan2 = writer_output_handler(writer, output_osf_file, input_info) + assert scan1 is not None + assert scan2 is not None + + assert not writer.is_closed() + writer.close() + assert writer.is_closed() + + assert scan1 is not None + assert scan2 is not None + + writer_input_handler(scan1, scan2, output_osf_file) + + +def test_osf_with_writer(tmp_path, input_info): + output_osf_file = tmp_path / "out_with.osf" + + with osf.Writer(str(output_osf_file), input_info) as writer: + scan1, scan2 = writer_output_handler( + writer, output_osf_file, input_info) + + writer_input_handler(scan1, scan2, output_osf_file) + + def test_osf_save_message(tmp_path, input_osf_file): output_osf_file = tmp_path / "out.osf" @@ -18,18 +158,19 @@ def test_osf_save_message(tmp_path, input_osf_file): lidar_meta = reader.meta_store.get(osf.LidarSensor) lidar_stream_meta = reader.meta_store.get(osf.LidarScanStream) - writer = osf.Writer(str(output_osf_file), reader.id) - lidar_id = writer.addMetadata(lidar_meta) - lidar_stream_id = writer.addMetadata(lidar_stream_meta) + writer = osf.Writer(str(output_osf_file)) + writer.set_metadata_id(reader.metadata_id) + lidar_id = writer.add_metadata(lidar_meta) + lidar_stream_id = writer.add_metadata(lidar_stream_meta) # WARNING: # This is a test of low-level saveMessage API that directly writes # any buffers into the message, users usually really don't want to use it # directly. Unless one is creating custom messages. 
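The writer tests above pass the sensor info to osf.Writer directly, save scans by sensor index, and use the writer as a context manager; the surrounding hunk also renames the low-level addMetadata and saveMessage calls to snake_case. A minimal round-trip sketch of the high-level pattern those tests exercise; the file paths are illustrative:

import ouster.sdk.client as client
import ouster.sdk.osf as osf

# sensor metadata for the stream being written, loaded from a JSON file
with open("OS-0-128_metadata.json") as f:
    info = client.SensorInfo(f.read())

# the writer takes the output path and sensor info up front
with osf.Writer("example.osf", info) as writer:
    scan = client.LidarScan(info.format.pixels_per_column,
                            info.format.columns_per_frame)
    scan.status[:] = 0x1
    scan.field(client.ChanField.REFLECTIVITY)[:] = 100
    writer.save(0, scan)  # write the scan for sensor index 0

# read the scan back by decoding the stored messages
reader = osf.Reader("example.osf")
scans = [msg.decode() for msg in reader.messages()]
assert len(scans) == 1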
- writer.saveMessage(lidar_id, 1, bytes([0, 1, 2, 3, 4])) - writer.saveMessage(lidar_id, 2, bytearray([0, 1, 2, 3, 4])) - writer.saveMessage(lidar_id, 3, [0, 1, 2, 3, 4]) - writer.saveMessage(lidar_id, 4, np.array([0, 1, 2, 3, 4], dtype=np.uint8)) + writer.save_message(lidar_id, 1, bytes([0, 1, 2, 3, 4])) + writer.save_message(lidar_id, 2, bytearray([0, 1, 2, 3, 4])) + writer.save_message(lidar_id, 3, [0, 1, 2, 3, 4]) + writer.save_message(lidar_id, 4, np.array([0, 1, 2, 3, 4], dtype=np.uint8)) # WARNING: It's all not safe to do, but because it's a test we know what # we are doing here @@ -38,7 +179,7 @@ def test_osf_save_message(tmp_path, input_osf_file): # pass LidarScan messages as is to a writer if msg.id == lidar_stream_meta.id: total_ls_cnt += 1 - writer.saveMessage(lidar_stream_id, msg.ts, msg.buffer) + writer.save_message(lidar_stream_id, msg.ts, msg.buffer) writer.close() @@ -92,3 +233,49 @@ def test_osf_messages(input_osf_file): assert reader.has_message_idx assert reader.ts_by_message_idx(lidar_stream.id, 0) > 0 + + +def _get_file_hash(file_name): + result = hashlib.sha512() + with open(file_name, "rb") as f: + result.update(f.read()) + + return result.hexdigest() + + +def test_osf_metadata_replacement_tools(tmp_path, input_osf_file): + new_metadata = client.SensorInfo.from_default( + client.LidarMode.MODE_1024x10) + + test_path = os.path.join(tmp_path, "test.osf") + backup_path = os.path.join(tmp_path, "test.bak") + + shutil.copyfile(str(input_osf_file), test_path) + assert os.path.exists(test_path) + assert os.stat(test_path).st_size == os.stat(str(input_osf_file)).st_size + hash1 = _get_file_hash(test_path) + + metadata1 = osf.dump_metadata(test_path) + assert not os.path.exists(backup_path) + osf.backup_osf_file_metablob(test_path, backup_path) + assert os.path.exists(backup_path) + + osf.osf_file_modify_metadata(test_path, [new_metadata]) + + hash2 = _get_file_hash(test_path) + + assert hash1 != hash2 + + osf.restore_osf_file_metablob(test_path, backup_path) + hash3 = _get_file_hash(test_path) + assert hash2 != hash3 + metadata3 = osf.dump_metadata(test_path) + assert metadata1 == metadata3 + + +def test_empty_osf_loop(test_data_dir): + source = open_source( + str(test_data_dir / "osfs" / "empty_osf.osf"), cycle=True) + + with pytest.raises(StopIteration): + next(iter(source)) diff --git a/python/tests/osf/test_osf_extrinsics.py b/python/tests/osf/test_osf_extrinsics.py index c03c7ee6..10671284 100644 --- a/python/tests/osf/test_osf_extrinsics.py +++ b/python/tests/osf/test_osf_extrinsics.py @@ -1,7 +1,7 @@ import pytest import numpy as np -import ouster.osf as osf +import ouster.sdk.osf as osf @pytest.fixture @@ -20,8 +20,8 @@ def sensor_metadata(input_osf_file): def test_osf_save_extrinsics(tmp_path, sensor_metadata): output_osf_file = tmp_path / "out.osf" - writer = osf.Writer(str(output_osf_file), "testing extrinsics") - lidar_id = writer.addMetadata(osf.LidarSensor(sensor_metadata)) + writer = osf.Writer(str(output_osf_file)) + lidar_id = writer.add_metadata(osf.LidarSensor(sensor_metadata)) ext_mat = np.eye(4) # some translation @@ -31,7 +31,7 @@ def test_osf_save_extrinsics(tmp_path, sensor_metadata): ext_mat[2, 1] = np.sin(np.pi / 8) ext_mat[1, 2] = -ext_mat[2, 1] - writer.addMetadata(osf.Extrinsics(ext_mat, lidar_id, "test_calibrated")) + writer.add_metadata(osf.Extrinsics(ext_mat, lidar_id, "test_calibrated")) writer.close() diff --git a/python/tests/test_batching.py b/python/tests/test_batching.py index de525a44..edd2614e 100644 --- 
a/python/tests/test_batching.py +++ b/python/tests/test_batching.py @@ -11,9 +11,9 @@ import numpy as np import pytest -from ouster import client -from ouster.client._client import ScanBatcher -from ouster.sdkx.parsing import PacketFormat, ColHeader +from ouster.sdk import client +from ouster.sdk.client._client import ScanBatcher +from ouster.sdk.util import PacketFormat, ColHeader def _patch_frame_id(packet: client.LidarPacket, fid: int) -> None: diff --git a/python/tests/test_cli.py b/python/tests/test_cli.py index 42ee7497..df124b12 100644 --- a/python/tests/test_cli.py +++ b/python/tests/test_cli.py @@ -1,5 +1,6 @@ # type: ignore import os +from glob import glob from pathlib import Path import pytest import sys @@ -10,19 +11,27 @@ from ouster.cli import core from ouster.cli.core.cli_args import CliArgs -from ouster.cli.plugins.io_type import io_type_from_extension, io_type_from_magic, OusterIoType -from ouster.cli.plugins import source, discover, source_osf # noqa: F401 -import ouster.pcap +from ouster.cli.plugins import source, source_osf # noqa: F401 +from ouster.sdk.io_type import io_type_from_extension, OusterIoType from tests.conftest import PCAPS_DATA_DIR, OSFS_DATA_DIR -has_magic = False -try: - import magic # noqa: F401 - has_magic = True -except ImportError as e: - print(e) +class set_directory(object): + """Sets the cwd within the context + + Args: + path (Path): The path to the cwd + """ + def __init__(self, path: Path): + self.path = path + self.origin = Path().absolute() + + def __enter__(self): + os.chdir(self.path) + + def __exit__(self, exc_type, exc_value, traceback): + os.chdir(self.origin) @pytest.fixture @@ -49,7 +58,7 @@ def read_commands_from_help_text(help_text: str) -> List[str]: """Reads the command names (but not their help text) from help text that Click generates for click.MultiCommand""" command_help_lines = help_text.split("Commands:")[1].splitlines()[1:] - return [line.split()[0].strip() for line in command_help_lines] + return set(line.split()[0].strip() for line in command_help_lines) def test_join_with_conjunction(): @@ -94,24 +103,13 @@ def test_cli_args_borg_2() -> None: assert not CliArgs().has_any_of(['a', 'b']) -@pytest.mark.skipif(not has_magic, reason="didn't have the magic.") -def test_io_type_from_magic(test_osf_file, test_pcap_file) -> None: - # magic doesn't know OSF - assert io_type_from_magic(test_osf_file) is None - assert io_type_from_magic(test_pcap_file) == OusterIoType.PCAP - # test_bag_file = BAG - # assert io_type_from_magic(test_bag_file) is None - with pytest.raises(FileNotFoundError): - io_type_from_magic('doesntexist') - - def test_io_type_from_extension() -> None: test_osf_name = 'OS1-inters-n5.osf' assert io_type_from_extension(test_osf_name) == OusterIoType.OSF test_pcap_name = 'data-inters-24784-OS1_128_fw23_legacy_n3.pcap' assert io_type_from_extension(test_pcap_name) == OusterIoType.PCAP test_bag_name = 'OS1_128_sample_fw23_lb_n3.bag' - assert io_type_from_extension(test_bag_name) == OusterIoType.ROSBAG + assert io_type_from_extension(test_bag_name) == OusterIoType.BAG def test_version(runner) -> None: @@ -124,23 +122,14 @@ def test_help(runner) -> None: result = runner.invoke(core.cli, ['--help']) assert result.exit_code == 0 - result = runner.invoke(core.cli, ['pcap', '--help']) - assert result.exit_code == 0 - - result = runner.invoke(core.cli, ['sensor', '--help']) - assert result.exit_code == 0 - result = runner.invoke(core.cli, CliArgs(['util', '--help']).args) assert "Usage: cli util [OPTIONS] COMMAND [ARGS]" in 
result.output assert result.exit_code == 0 - result = runner.invoke(core.cli, ['osf', '--help']) + result = runner.invoke(core.cli, ['--traceback', 'util']) assert result.exit_code == 0 - result = runner.invoke(core.cli, ['--traceback', 'osf']) - assert result.exit_code == 0 - - result = runner.invoke(core.cli, ['--sdk-log-level', 'debug', 'osf']) + result = runner.invoke(core.cli, ['--sdk-log-level', 'debug', 'util']) assert result.exit_code == 0 @@ -148,8 +137,6 @@ def test_mapping_help(runner, has_mapping): result = runner.invoke(core.cli, ['mapping', '--help']) if has_mapping: assert result.exit_code == 0 - else: - assert result.exit_code == 2 def test_source_help(runner) -> None: @@ -157,7 +144,7 @@ def test_source_help(runner) -> None: result = runner.invoke(core.cli, CliArgs(['source', '--help']).args) # check that a variety of SOURCE commands are in the output - assert "PCAP info" in result.output + assert "PCAP|OSF info" in result.output assert "SENSOR config" in result.output # check that general message is there @@ -179,21 +166,21 @@ def test_source_sensor(runner, has_mapping) -> None: # sensor result = runner.invoke(core.cli, ['source', '127.0.0.1']) assert result.exit_code == 0 - expected_commands = ['config', 'metadata', 'record', 'viz'] + expected_commands = {'config', 'metadata', 'viz', 'slice', 'save'} if has_mapping: - expected_commands.append('slam') - assert read_commands_from_help_text(result.output) == expected_commands + expected_commands.add('slam') + assert set(read_commands_from_help_text(result.output)) >= expected_commands def test_source_pcap(runner, has_mapping) -> None: # pcap - expected_commands = ['convert', 'info', 'slice', 'viz'] + expected_commands = {'info', 'slice', 'viz'} if has_mapping: - expected_commands.append('slam') + expected_commands.add('slam') with tempfile.NamedTemporaryFile(suffix='.pcap') as temp_pcap: result = runner.invoke(core.cli, ['source', temp_pcap.name]) assert result.exit_code == 0 - assert read_commands_from_help_text(result.output) == expected_commands + assert read_commands_from_help_text(result.output) >= expected_commands @pytest.mark.skip @@ -292,15 +279,15 @@ def test_source_pcap_slice_help(test_pcap_file, runner): """ouster-cli source slice --help should display help""" result = runner.invoke(core.cli, ['source', test_pcap_file, 'slice', '--help']) - assert "Usage: cli source SOURCE slice [OPTIONS] OUTPUT" in result.output + assert "Usage: cli source SOURCE slice [OPTIONS] INDICES" in result.output assert result.exit_code == 0 -def test_source_pcap_slice_no_output(test_pcap_file, runner): +def test_source_pcap_slice_no_arguments(test_pcap_file, runner): # help option not provided, but no output file provided result = runner.invoke(core.cli, ['source', test_pcap_file, 'slice']) - assert "Usage: cli source SOURCE slice [OPTIONS] OUTPUT" in result.output - assert "Missing argument 'OUTPUT'." in result.output + assert "Usage: cli source SOURCE slice [OPTIONS] INDICES" in result.output + assert "Missing argument 'INDICES'." 
in result.output assert result.exit_code == 2 @@ -308,51 +295,91 @@ def test_source_pcap_slice_help_2(test_pcap_file, runner): """ouster-cli source slice --help should display help""" result = runner.invoke(core.cli, ['source', test_pcap_file, 'slice', 'outputfile.pcap', '--help']) - assert result.exit_code == 0 - assert "Usage: cli source SOURCE slice [OPTIONS] OUTPUT" in result.output + assert result.exit_code == 2 + assert "Error: Invalid value for 'INDICES'" in result.output -def test_source_pcap_slice(test_pcap_file, runner): - """Slicing a pcap should succeed with exit code 0.""" +def source_pcap_slice_impl(test_pcap_file, runner, command, packets): try: - with tempfile.NamedTemporaryFile(suffix='.pcap', delete=False) as f: + with tempfile.NamedTemporaryFile(delete=False) as f: pass - result = runner.invoke(core.cli, ['source', test_pcap_file, 'slice', '-n', 1, f.name]) - assert f'Writing: {f.name}' in result.output + result = runner.invoke(core.cli, ['source', test_pcap_file, 'slice', + command, 'save', '-p', f.name, ".pcap"]) + # FIXME! Written file paths should be logged in output. + # assert f'Writing: {f.name}' in result.output assert result.exit_code == 0 - result2 = runner.invoke(core.cli, ['source', f.name, 'info']) + pcaps_generated = glob(f'{f.name}_*.pcap') + assert len(pcaps_generated) == 1 + pcap_filename = pcaps_generated[0] + result2 = runner.invoke(core.cli, ['source', pcap_filename, 'info']) assert result2.exit_code == 0 - assert "Packets read: 74" in result2.output + print(result2.output) + assert "Packets read: " + packets in result2.output finally: - os.unlink(f.name) + json_filename = pcap_filename[:-4] + 'json' + os.unlink(f'{f.name}') + os.unlink(pcap_filename) + os.unlink(json_filename) -def test_source_pcap_convert_no_output_file(test_pcap_file, runner): - result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'convert']).args) - assert "Error: Missing argument 'OUTPUT_FILE'." 
in result.output - assert result.exit_code == 2 +def test_source_pcap_slice(test_pcap_file, runner): + """Slicing a pcap should succeed with exit code 0.""" + source_pcap_slice_impl(test_pcap_file, runner, "0:1:1", "64") + source_pcap_slice_impl(test_pcap_file, runner, "0:1:2", "64") + source_pcap_slice_impl(test_pcap_file, runner, "0:1:3", "64") + source_pcap_slice_impl(test_pcap_file, runner, "0:2", "64") + source_pcap_slice_impl(test_pcap_file, runner, "0:", "64") + source_pcap_slice_impl(test_pcap_file, runner, "2:", "0") + source_pcap_slice_impl(test_pcap_file, runner, "1::", "0") + source_pcap_slice_impl(test_pcap_file, runner, "1::1", "0") + + +def test_source_pcap_save_no_filename(test_pcap_file, runner, tmp_path): + """It should save an automatically named file with that extension by default.""" + with set_directory(tmp_path): + assert not os.listdir(tmp_path) # no files in output dir + result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'save', '.osf']).args) + assert result.exit_code == 0 + # there's at most one OSF file in output dir + files = os.listdir(tmp_path) + assert len(files) == 1 + assert all(filename.endswith('.osf') for filename in files) -def test_source_pcap_convert_help(test_pcap_file, runner): - """ouster-cli source convert --help - should display help""" - result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'convert', '--help']).args) - assert "Usage: cli source SOURCE convert [OPTIONS] OUTPUT_FILE" in result.output - assert result.exit_code == 0 +def test_source_pcap_save_filename(test_pcap_file, runner, tmp_path): + """It should save an osf file with the desired name.""" + with set_directory(tmp_path): + assert not os.listdir(tmp_path) # no files in output dir + result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'save', 'test.osf']).args) + assert result.exit_code == 0 + # there's at most one OSF file in output dir + files = os.listdir(tmp_path) + assert len(files) == 1 + assert all(filename == 'test.osf' for filename in files) -def test_source_pcap_convert_help_2(test_pcap_file, runner): - """ouster-cli source convert .osf --help - should display OSF convert help""" - with tempfile.NamedTemporaryFile(suffix='.osf') as f: - result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'convert', f.name, '--help']).args) +def test_source_pcap_save_filename_prefix(test_pcap_file, runner, tmp_path): + """It should save an osf file with the desired name and prefix.""" + with set_directory(tmp_path): + assert not os.listdir(tmp_path) # no files in output dir + result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'save', '-p', 'prefix', 'test.osf']).args) + assert result.exit_code == 0 + # there's at most one OSF file in output dir + files = os.listdir(tmp_path) + assert len(files) == 1 + assert all(filename == 'prefix_test.osf' for filename in files) - assert "Usage: cli source SOURCE convert [OPTIONS] OUTPUT_FILE" in result.output - option_names = [option.name.replace('_', '-') for option in source_osf.osf_from_pcap.params] - # check that all the options for the command are present in the help - assert all([option_name in result.output.lower().replace('_', '-') for option_name in option_names]) - assert result.exit_code == 0 +def test_source_pcap_save_no_extension(test_pcap_file, runner, tmp_path): + """It should exit with an error if the output filename has no extension.""" + with set_directory(tmp_path): + assert not os.listdir(tmp_path) # no files in output dir + result = runner.invoke(core.cli, CliArgs(['source', 
test_pcap_file, 'save', 'osf']).args) + assert result.exit_code == 2 + # there's no OSF file in output dir + files = os.listdir(tmp_path) + assert "Must provide a " in result.output + assert len(files) == 0 def test_source_osf_info_help(test_osf_file, runner): @@ -365,17 +392,15 @@ def test_source_osf_info_help(test_osf_file, runner): assert all([option_name in result.output.lower().replace('_', '-') for option_name in option_names]) assert result.exit_code == 0 -# TODO Re-eanble when osf viz is exposed -# def test_source_osf_viz_help(test_osf_file, runner): -# """ouster-cli source .osf viz --help -# should display OSF viz help""" -# result = runner.invoke(core.cli, ['source', test_osf_file, 'viz', '--help']) -# assert "Usage: cli source SOURCE viz [OPTIONS]" in result.output -# option_names = [option.name.replace('_', '-') for option in source_osf.osf_viz.params] -# # 'cycle is deprecated and now hidden -# option_names.remove('cycle') -# assert all([option_name in result.output.lower().replace('_', '-') for option_name in option_names]) -# assert result.exit_code == 0 + +def test_source_osf_viz_help(test_osf_file, runner): + """ouster-cli source .osf viz --help + should display OSF viz help""" + result = runner.invoke(core.cli, ['source', test_osf_file, 'viz', '--help']) + assert "Usage: cli source SOURCE viz [OPTIONS]" in result.output + option_names = [option.name.replace('_', '-') for option in source.source_viz.params] + assert all([option_name in result.output.lower().replace('_', '-') for option_name in option_names]) + assert result.exit_code == 0 # TODO: Uncomment when bag conversion is re-enabled @@ -393,26 +418,6 @@ def test_source_osf_info_help(test_osf_file, runner): # assert result.exit_code == 0 -def test_source_pcap_convert_bad_extension(test_pcap_file, runner): - """ouster-cli source .pcap convert .badextension - should display an error""" - with tempfile.NamedTemporaryFile(suffix='.badextension') as f: - result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'convert', f.name]).args) - assert source.source.commands[OusterIoType.PCAP]['convert'].get_output_type_file_extensions_str( - ) in result.output - assert result.exit_code == 2 - - -def test_source_pcap_convert_bad_extension_2(test_pcap_file, runner): - """ouster-cli source .pcap convert .pcap - should display an error""" - with tempfile.NamedTemporaryFile(suffix='.pcap') as f: - result = runner.invoke(core.cli, CliArgs(['source', test_pcap_file, 'convert', f.name]).args) - assert source.source.commands[OusterIoType.PCAP]['convert'].get_output_type_file_extensions_str( - ) in result.output - assert result.exit_code == 2 - - def test_discover(runner): """ouster-cli discover --help should display discover plugin help.""" @@ -421,14 +426,6 @@ def test_discover(runner): assert result.exit_code == 0 -def test_match_metadata_with_data_stream(test_pcap_file, test_metadata_file): - """It should find the data stream with a destination port that matches the metadata file lidar port""" - all_infos = ouster.pcap._packet_info_stream(test_pcap_file, 0, None, 100) - meta = ouster.client.SensorInfo(open(test_metadata_file).read()) - matched_stream = ouster.cli.core.pcap.match_metadata_with_data_stream(all_infos, meta) - assert matched_stream.dst_port == 7502 - - def test_source_osf(runner, has_mapping) -> None: """It should list the correct commands in the help depending on source type.""" @@ -436,16 +433,16 @@ def test_source_osf(runner, has_mapping) -> None: with tempfile.NamedTemporaryFile(suffix='.osf') as temp_osf: 
result = runner.invoke(core.cli, ['source', temp_osf.name]) assert result.exit_code == 0 - expected_commands = ['convert', 'info', 'viz'] + expected_commands = {'info', 'viz', 'slice', 'save'} if has_mapping: - expected_commands.append('slam') - assert read_commands_from_help_text(result.output) == expected_commands + expected_commands.add('slam') + assert read_commands_from_help_text(result.output) >= expected_commands -def test_source_osf_info(test_osf_file, runner): - """ouster-cli source .osf info +def test_source_osf_dump(test_osf_file, runner): + """ouster-cli source .osf dump should display OSF metadata""" - result = runner.invoke(core.cli, ['source', test_osf_file, 'info']) + result = runner.invoke(core.cli, ['source', test_osf_file, 'dump']) meta = json.loads(result.output) assert len(meta['metadata']['entries']) == 3 assert 'buffer' in meta['metadata']['entries'][0] @@ -453,10 +450,10 @@ def test_source_osf_info(test_osf_file, runner): assert result.exit_code == 0 -def test_source_osf_info_short(test_osf_file, runner): +def test_source_osf_dump_short(test_osf_file, runner): """ouster-cli source .osf info -s should display OSF metadata in short form""" - result = runner.invoke(core.cli, ['source', test_osf_file, 'info', '-s']) + result = runner.invoke(core.cli, ['source', test_osf_file, 'dump', '-s']) meta = json.loads(result.output) assert len(meta['metadata']['entries']) == 3 assert 'buffer' not in meta['metadata']['entries'][0] diff --git a/python/tests/test_config.py b/python/tests/test_config.py index 9c2f4744..83b2d5c8 100644 --- a/python/tests/test_config.py +++ b/python/tests/test_config.py @@ -9,7 +9,7 @@ import warnings import inspect -from ouster import client +from ouster.sdk import client # all valid values valid_signal_multiplier_values = [0.25, 0.5, 1, 2, 3] @@ -110,8 +110,11 @@ def test_optional_config() -> None: config = client.SensorConfig() # make sure all the values are empty + assert config.accel_fsr is None assert config.azimuth_window is None assert config.lidar_mode is None + assert config.gyro_fsr is None + assert config.min_range_threshold_cm is None assert config.multipurpose_io_mode is None assert config.nmea_baud_rate is None assert config.nmea_in_polarity is None @@ -120,6 +123,7 @@ def test_optional_config() -> None: assert config.operating_mode is None assert config.phase_lock_enable is None assert config.phase_lock_offset is None + assert config.return_order is None assert config.signal_multiplier is None assert config.sync_pulse_out_pulse_width is None assert config.sync_pulse_out_frequency is None @@ -160,6 +164,10 @@ def test_write_config() -> None: config.udp_profile_lidar = client.UDPProfileLidar.PROFILE_LIDAR_LEGACY config.udp_profile_imu = client.UDPProfileIMU.PROFILE_IMU_LEGACY config.columns_per_packet = 8 + config.return_order = client.ReturnOrder.ORDER_FARTHEST_TO_NEAREST + config.gyro_fsr = client.FullScaleRange.FSR_NORMAL + config.accel_fsr = client.FullScaleRange.FSR_EXTENDED + config.min_range_threshold_cm = 30 with pytest.raises(TypeError): config.lidar_mode = 1 # type: ignore @@ -170,9 +178,12 @@ def test_write_config() -> None: @pytest.fixture() def complete_config_string() -> str: complete_config_string = """ - {"azimuth_window": [0, 360000], + {"accel_fsr": "EXTENDED", + "azimuth_window": [0, 360000], "columns_per_packet": 8, + "gyro_fsr": "EXTENDED", "lidar_mode": "1024x10", + "min_range_threshold_cm": 30, "multipurpose_io_mode": "OFF", "nmea_baud_rate": "BAUD_9600", "nmea_ignore_valid_char": 0, @@ -181,6 +192,7 @@ def 
complete_config_string() -> str: "operating_mode": "NORMAL", "phase_lock_enable": false, "phase_lock_offset": 0, + "return_order": "STRONGEST_TO_WEAKEST", "signal_multiplier": 2, "sync_pulse_in_polarity": "ACTIVE_HIGH", "sync_pulse_out_angle": 360, @@ -201,9 +213,12 @@ def complete_config_string() -> str: def all_different_config_string() -> str: """All different from complete_config_string except for udp_profile_imu""" all_different_config_string = """ - {"azimuth_window": [180000, 360000], + {"accel_fsr": "NORMAL", + "azimuth_window": [180000, 360000], "columns_per_packet": 16, + "gyro_fsr": "NORMAL", "lidar_mode": "512x10", + "min_range_threshold_cm": 0, "multipurpose_io_mode": "INPUT_NMEA_UART", "nmea_baud_rate": "BAUD_115200", "nmea_ignore_valid_char": 1, @@ -212,6 +227,7 @@ def all_different_config_string() -> str: "operating_mode": "STANDBY", "phase_lock_enable": true, "phase_lock_offset": 180000, + "return_order": "NEAREST_TO_FARTHEST", "signal_multiplier": 0.5, "sync_pulse_in_polarity": "ACTIVE_LOW", "sync_pulse_out_angle": 180, @@ -233,8 +249,11 @@ def test_read_config(complete_config_string: str) -> None: config = client.SensorConfig(complete_config_string) # read from string # make sure all the values are correct + assert config.accel_fsr == client.FullScaleRange.FSR_EXTENDED assert config.azimuth_window == (0, 360000) + assert config.gyro_fsr == client.FullScaleRange.FSR_EXTENDED assert config.lidar_mode == client.LidarMode.MODE_1024x10 + assert config.min_range_threshold_cm == 30 assert config.multipurpose_io_mode == client.MultipurposeIOMode.MULTIPURPOSE_OFF assert config.nmea_baud_rate == client.NMEABaudRate.BAUD_9600 assert config.nmea_in_polarity == client.Polarity.POLARITY_ACTIVE_HIGH @@ -243,6 +262,7 @@ def test_read_config(complete_config_string: str) -> None: assert config.operating_mode == client.OperatingMode.OPERATING_NORMAL assert config.phase_lock_enable is False assert config.phase_lock_offset == 0 + assert config.return_order == client.ReturnOrder.ORDER_STRONGEST_TO_WEAKEST assert config.signal_multiplier == 2 assert config.sync_pulse_out_pulse_width == 10 assert config.sync_pulse_out_frequency == 1 @@ -311,7 +331,7 @@ def test_equality_config(complete_config_string: str, all_different_config_strin setattr(copy_config, property_name, property_value) assert copy_config != base_config - assert len(config_properties) == 23, "Don't forget to update tests and the config == operator!" + assert len(config_properties) == 27, "Don't forget to update tests and the config == operator!" 
def test_copy_config(complete_config_string: str) -> None: diff --git a/python/tests/test_core.py b/python/tests/test_core.py index bf81f37c..26c7f6f5 100644 --- a/python/tests/test_core.py +++ b/python/tests/test_core.py @@ -9,11 +9,12 @@ import numpy as np import pytest -from ouster import client -from ouster.client import ChanField, _LidarPacket, _ImuPacket +from ouster.sdk import client +from ouster.sdk.client import ChanField, _LidarPacket, _ImuPacket +from ouster.sdk.client.core import ClientTimeout -pytest.register_assert_rewrite('ouster.client._digest') -import ouster.client._digest as digest # noqa +pytest.register_assert_rewrite('ouster.sdk.client._digest') +import ouster.sdk.client._digest as digest # noqa @pytest.fixture @@ -24,8 +25,8 @@ def default_meta(): def test_sensor_init(default_meta: client.SensorInfo) -> None: """Initializing a data stream with metadata makes no network calls.""" with closing(client.Sensor("", 0, 0, metadata=default_meta)) as source: - assert source._cli.lidar_port != 0 - assert source._cli.imu_port != 0 + assert source.lidar_port != 0 + assert source.imu_port != 0 def test_sensor_timeout(default_meta: client.SensorInfo) -> None: @@ -49,13 +50,13 @@ def test_sensor_port_in_use(default_meta: client.SensorInfo) -> None: with closing(client.Sensor("", 0, 0, metadata=default_meta)) as s1: with closing( client.Sensor("", - s1._cli.lidar_port, - s1._cli.imu_port, + s1.lidar_port, + s1.imu_port, metadata=default_meta)) as s2: - assert s2._cli.lidar_port != 0 - assert s2._cli.imu_port != 0 - assert s2._cli.lidar_port == s1._cli.lidar_port - assert s2._cli.imu_port == s1._cli.imu_port + assert s2.lidar_port != 0 + assert s2.imu_port != 0 + assert s2.lidar_port == s1.lidar_port + assert s2.imu_port == s1.imu_port def test_sensor_packet(default_meta: client.SensorInfo) -> None: @@ -73,7 +74,7 @@ def test_sensor_packet(default_meta: client.SensorInfo) -> None: data = np.random.randint(255, size=source._pf.lidar_packet_size, dtype=np.uint8) - sock.sendto(data.tobytes(), ("localhost", source._cli.lidar_port)) + sock.sendto(data.tobytes(), ("localhost", source.lidar_port)) packet = next(iter(source)) assert (packet._data == data).all() assert isinstance(packet, _LidarPacket) @@ -81,12 +82,33 @@ def test_sensor_packet(default_meta: client.SensorInfo) -> None: data = np.random.randint(255, size=source._pf.imu_packet_size, dtype=np.uint8) - sock.sendto(data.tobytes(), ("localhost", source._cli.imu_port)) + sock.sendto(data.tobytes(), ("localhost", source.imu_port)) packet = next(iter(source)) assert (packet._data == data).all() assert isinstance(packet, _ImuPacket) +def test_sensor_flush(default_meta: client.SensorInfo) -> None: + with closing( + client.Sensor("", + 0, + 0, + metadata=default_meta, + timeout=1.0, + _flush_before_read=False)) as source: + total_packets_sent = 5 + for i in range(total_packets_sent): + data = np.zeros((source._pf.lidar_packet_size), dtype=np.uint8) + data[:] = i + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.sendto(data.tobytes(), ("localhost", source.lidar_port)) + flushed_packets = 2 + source.flush(flushed_packets) + for i in range(flushed_packets, total_packets_sent): + packet = next(iter(source)) + assert (packet._data == i).all() + + def test_sensor_packet_bad_size(default_meta: client.SensorInfo) -> None: """Check that the client will ignore improperly-sized packets.""" with closing( @@ -97,10 +119,44 @@ def test_sensor_packet_bad_size(default_meta: client.SensorInfo) -> None: timeout=1.0, 
_flush_before_read=False)) as source: sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) - sock.sendto(b"hello", ("localhost", source._cli.lidar_port)) + + # send packet too small + sock.sendto(b"hello", ("localhost", source.lidar_port)) with pytest.raises(client.ClientTimeout): next(iter(source)) + # send packet too big + data = np.random.randint(255, + size=source._pf.lidar_packet_size + 10, + dtype=np.uint8) + sock.sendto(data.tobytes(), ("localhost", source.lidar_port)) + with pytest.raises(client.ClientTimeout): + next(iter(source)) + + +# TODO: reenable once we figure out CI determinism +@pytest.mark.skip +def test_sensor_overflow(default_meta: client.SensorInfo) -> None: + with closing( + client.Sensor("", + 0, + 0, + buf_size=10, + metadata=default_meta, + timeout=1.0, + _overflow_err=True, + _flush_before_read=False)) as source: + total_packets_sent = 20 + for i in range(total_packets_sent): + data = np.random.randint(255, + size=source._pf.lidar_packet_size, + dtype=np.uint8) + sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) + sock.sendto(data.tobytes(), ("localhost", source.lidar_port)) + with pytest.raises(client.ClientOverflow): + for i in range(total_packets_sent): + next(iter(source)) + def test_scans_simple(packets: client.PacketSource) -> None: """Check that the test data contains exactly one scan.""" @@ -199,19 +255,13 @@ def test_scans_complete(packets: client.PacketSource) -> None: @pytest.mark.parametrize('test_key', ['legacy-2.0']) def test_scans_timeout(packets: client.PacketSource) -> None: - """A zero timeout should deterministically throw. - - TODO: should it, though? - - TWS 20230609: a timeout no longer raises an exception... - instead it stops iteration and sets _timed_out=True. + """A zero timeout should deterministically throw a ClientTimeout. """ scans = client.Scans(packets, timeout=0.0) scans_itr = iter(scans) - with pytest.raises(StopIteration): + with pytest.raises(ClientTimeout): next(scans_itr) - assert scans._timed_out def test_scans_digest(stream_digest, packets: client.PacketSource) -> None: diff --git a/python/tests/test_data.py b/python/tests/test_data.py index 8673ecea..6dd58ae8 100644 --- a/python/tests/test_data.py +++ b/python/tests/test_data.py @@ -13,9 +13,9 @@ import numpy as np import pytest -from ouster import client -from ouster.client import _client -from ouster.pcap import _pcap +from ouster.sdk import client +from ouster.sdk.client import _client +from ouster.sdk.pcap import _pcap from tests.conftest import PCAPS_DATA_DIR @@ -80,15 +80,8 @@ def test_lidar_packet(meta: client.SensorInfo) -> None: assert len( client.ColHeader.__members__) == 5, "Don't forget to update tests!" 
- assert np.array_equal(p.header(client.ColHeader.TIMESTAMP), np.zeros(w)) assert np.array_equal(p.timestamp, np.zeros(w)) - assert np.array_equal(p.header(client.ColHeader.FRAME_ID), np.zeros(w)) - assert np.array_equal(p.header(client.ColHeader.MEASUREMENT_ID), - np.zeros(w)) assert np.array_equal(p.measurement_id, np.zeros(w)) - assert np.array_equal(p.header(client.ColHeader.ENCODER_COUNT), - np.zeros(w)) - assert np.array_equal(p.header(client.ColHeader.STATUS), np.zeros(w)) assert np.array_equal(p.status, np.zeros(w)) assert p.frame_id == 0 @@ -98,7 +91,7 @@ def test_lidar_packet(meta: client.SensorInfo) -> None: p.field(client.ChanField.REFLECTIVITY)[0] = 1 with pytest.raises(ValueError): - p.header(client.ColHeader.MEASUREMENT_ID)[0] = 1 + p.measurement_id[0] = 1 with pytest.raises(ValueError): p.status[:] = 1 @@ -115,8 +108,6 @@ def test_read_legacy_packet(packet: client.LidarPacket) -> None: assert packet.field(client.ChanField.SIGNAL)[-1, 0] == 6 assert packet.field(client.ChanField.NEAR_IR)[-1, 0] == 13 - assert np.all(np.diff(packet.header(client.ColHeader.FRAME_ID)) == 0) - assert np.all(np.diff(packet.header(client.ColHeader.MEASUREMENT_ID)) == 1) assert np.all(np.diff(packet.timestamp) > 0) assert np.all(np.diff(packet.measurement_id) == 1) assert packet.packet_type == 0 @@ -126,7 +117,6 @@ def test_read_legacy_packet(packet: client.LidarPacket) -> None: assert packet.shot_limiting == 0 assert packet.thermal_shutdown == 0 # in 1024xN mode, the angle between measurements is exactly 88 encoder ticks - assert np.all(np.diff(packet.header(client.ColHeader.ENCODER_COUNT)) == 88) assert np.all(packet.status == 0xffffffff) @@ -138,8 +128,6 @@ def test_read_single_return_packet(packet: client.LidarPacket) -> None: assert packet.field(client.ChanField.SIGNAL)[-1, 0] == 34 assert packet.field(client.ChanField.NEAR_IR)[-1, 0] == 393 - assert np.all(np.diff(packet.header(client.ColHeader.FRAME_ID)) == 0) - assert np.all(np.diff(packet.header(client.ColHeader.MEASUREMENT_ID)) == 1) assert np.all(np.diff(packet.timestamp) > 0) assert np.all(np.diff(packet.measurement_id) == 1) assert packet.packet_type == 1 @@ -150,7 +138,6 @@ def test_read_single_return_packet(packet: client.LidarPacket) -> None: assert packet.thermal_shutdown == 0 # Changes from LEGACY - assert np.all(np.diff(packet.header(client.ColHeader.ENCODER_COUNT)) == 0) assert np.all(packet.status == 0x01) @@ -197,20 +184,6 @@ def test_scan_writeable() -> None: [0, 0, 1, 0], [0, 0, 0, 1]])) -def test_scan_from_native() -> None: - ls = client.LidarScan(1024, 32) - ls2 = client.LidarScan.from_native(ls) - - assert ls is ls2 - - -def test_scan_to_native() -> None: - ls = client.LidarScan(1024, 32) - ls2 = ls.to_native() - - assert ls is ls2 - - def test_scan_field_ref() -> None: """Test that field references keep scans alive.""" @@ -519,6 +492,97 @@ def test_scan_eq_with_custom_fields() -> None: assert ls2 == ls0 +def test_scan_copy_extension() -> None: + """ Verify we can clone a scan and null pad missing desired fields """ + ls0 = client.LidarScan(32, 512, { + client.ChanField.CUSTOM4: np.uint8 + }) + + ls0.field(client.ChanField.CUSTOM4)[:] = 123 + + ls1 = client.LidarScan(ls0, { + client.ChanField.CUSTOM0: np.uint32, + client.ChanField.CUSTOM4: np.uint8 + }) + + assert len(list(ls1.fields)) == 2 + assert np.count_nonzero(ls1.field(client.ChanField.CUSTOM0)[0, 0]) == 0 + assert np.count_nonzero( + ls1.field(client.ChanField.CUSTOM4) == 123) == ls1.h * ls1.w + + +def test_scan_copy_retraction() -> None: + """ Verify we can clone 
a scan and remove undesired fields """ + ls0 = client.LidarScan(32, 512, { + client.ChanField.CUSTOM0: np.uint32, + client.ChanField.CUSTOM4: np.uint8 + }) + + ls0.field(client.ChanField.CUSTOM0)[:] = 100 + ls0.field(client.ChanField.CUSTOM4)[:] = 123 + + ls1 = client.LidarScan(ls0, { + client.ChanField.CUSTOM0: np.uint32, + }) + + assert ls0.h == ls1.h + assert ls0.w == ls1.w + + assert len(list(ls1.fields)) == 1 + assert np.count_nonzero( + ls1.field(client.ChanField.CUSTOM0) == 100) == ls1.h * ls1.w + with pytest.raises(ValueError): + ls1.field(client.ChanField.CUSTOM4)[0, 0] == 100 + + +def test_scan_copy_cast() -> None: + """ Verify we can clone a scan and cast between field types """ + ls0 = client.LidarScan(32, 512, { + client.ChanField.CUSTOM0: np.uint32, + client.ChanField.CUSTOM4: np.uint8 + }) + + ls0.field(client.ChanField.CUSTOM0)[:] = 2 ** 16 - 1 + ls0.field(client.ChanField.CUSTOM4)[:] = 255 + + ls1 = client.LidarScan(ls0, { + client.ChanField.CUSTOM0: np.uint8, + client.ChanField.CUSTOM4: np.uint16 + }) + + assert ls0.h == ls1.h + assert ls0.w == ls1.w + + assert len(list(ls1.fields)) == 2 + assert ls1.field(client.ChanField.CUSTOM0).dtype == np.uint8 + assert ls1.field(client.ChanField.CUSTOM4).dtype == np.uint16 + assert np.count_nonzero( + ls1.field(client.ChanField.CUSTOM0) == 255) == ls1.h * ls1.w + assert np.count_nonzero( + ls1.field(client.ChanField.CUSTOM4) == 255) == ls1.h * ls1.w + + +def test_scan_copy() -> None: + ls0 = client.LidarScan(32, 512, { + client.ChanField.CUSTOM0: np.uint32, + client.ChanField.CUSTOM4: np.uint8 + }) + + ls0.field(client.ChanField.CUSTOM0)[:] = 100 + ls0.field(client.ChanField.CUSTOM4)[:] = 123 + + ls1 = client.LidarScan(ls0) + + assert ls0.h == ls1.h + assert ls0.w == ls1.w + + assert len(list(ls1.fields)) == 2 + assert np.count_nonzero( + ls1.field(client.ChanField.CUSTOM0) == 100) == ls1.h * ls1.w + assert np.count_nonzero( + ls1.field(client.ChanField.CUSTOM4) == 123) == ls1.h * ls1.w + + def test_error_eq() -> None: assert client.PacketSizeError("abc") == client.PacketSizeError("abc") @@ -679,10 +743,10 @@ def test_packet_writer_bindings(meta: client.SensorInfo) -> None: ls = client.LidarScan(pf.pixels_per_column, columns_per_frame, pf.udp_profile_lidar, pf.columns_per_packet) # all fields are invalid, expect zero packets - packets = _client.scan_to_packets(ls, pw) + packets = _client.scan_to_packets(ls, pw, 0, 0) assert len(packets) == 0 expected_packets = columns_per_frame / pf.columns_per_packet ls.status[:] = 0x1 - packets = _client.scan_to_packets(ls, pw) + packets = _client.scan_to_packets(ls, pw, 0, 0) assert len(packets) == expected_packets diff --git a/python/tests/test_destagger.py b/python/tests/test_destagger.py index 0f6e9267..0a9c18e2 100644 --- a/python/tests/test_destagger.py +++ b/python/tests/test_destagger.py @@ -6,7 +6,7 @@ import numpy as np import pytest -from ouster import client +from ouster.sdk import client from ouster.sdk.examples import reference diff --git a/python/tests/test_discover.py b/python/tests/test_discover.py index fa4de94f..179db42a 100644 --- a/python/tests/test_discover.py +++ b/python/tests/test_discover.py @@ -5,205 +5,141 @@ * All rights reserved. """ -import requests -import ouster.cli.plugins.discover -from ouster.cli.plugins.discover import service_info_as_text_str -import socket -from zeroconf import DNSAddress - - -FAKESERVER = 'fakeserver.' 
- - -class FakeInfo: - def __init__(self, fake_server, fake_addresses): - self.fake_server = fake_server - self.fake_addresses = fake_addresses - - def dns_addresses(self): - _type = 0 # not important to us - _class = 0 # not important to us - _ttl = 0 # not important to us - return [ - DNSAddress( - self.fake_server, - _type, - _class, - _ttl, - socket.inet_pton(socket.AF_INET6 if ':' in address else socket.AF_INET, address) - ) - for address in self.fake_addresses - ] - - @property - def server(self): - return self.fake_server - - -def test_service_info_as_text_str(monkeypatch): - """It should format correctly even with no addresses in the info.""" - def mock_get(_): - raise RuntimeError("err") - def mock_connect(foo, bar): # NOQA - class MockSocket: - def __init__(self, af=None, sock_type=None): # NOQA - pass - - def connect_ex(self, *arg): # NOQA - return 1 - - def close(self): - pass - return MockSocket() - - with monkeypatch.context() as m: - m.setattr(requests, "get", mock_get) - m.setattr(socket, "socket", mock_connect) - - text, color, error = service_info_as_text_str(FakeInfo(FAKESERVER, [])) - server, address, prod_line, dest_ip, lidar_port, imu_port = text.split() - assert FAKESERVER == server - assert address == '-' - assert prod_line == '-' - - fake_addresses = ["192.168.100.200", "200a:aa8::8a2e:370:1337"] - text, color, error = service_info_as_text_str(FakeInfo(FAKESERVER, fake_addresses)) - server, address, prod_line, dest_ip, lidar_port, imu_port = text.split() - assert FAKESERVER == server - assert address == fake_addresses[0] - assert prod_line == '-' - - -def test_service_info_as_text_str_2(monkeypatch): - """It should format correctly even when sensor metadata can't be retrieved.""" - def mock_get(_): - raise RuntimeError("err") - def mock_connect(foo, bar): # NOQA - class MockSocket: - def __init__(self, af=None, sock_type=None): # NOQA - pass - - def connect_ex(self, *arg): # NOQA - return 1 - - def close(self): - pass - return MockSocket() - with monkeypatch.context() as m: - m.setattr(requests, "get", mock_get) - m.setattr(socket, "socket", mock_connect) - - fake_addresses = ["192.168.100.200", "200a:aa8::8a2e:370:1337"] - text, color, error = service_info_as_text_str(FakeInfo(FAKESERVER, fake_addresses)) - server, address, prod_line, dest_ip, lidar_port, imu_port = text.split() - assert FAKESERVER == server - assert address == fake_addresses[0] - assert prod_line == '-' - - -def test_service_info_as_text_str_3(monkeypatch): - """It should set prod_line when the HTTP response contains it.""" - def mock_get(url): - class MockResponse: - def json(self): - return {'prod_line': 'fake_prod_line'} - return MockResponse() - def mock_connect(foo, bar): # NOQA - class MockSocket: - def __init__(self, af=None, sock_type=None): # NOQA - pass - - def connect_ex(self, *arg): # NOQA - return 0 - - def close(self): - pass - return MockSocket() - - def mock_config(self, *arg): # NOQA - return None - - with monkeypatch.context() as m: - m.setattr(requests, "get", mock_get) - m.setattr(socket, "socket", mock_connect) - m.setattr(ouster.cli.plugins.discover, "_get_config", mock_config) - - fake_addresses = ["192.168.100.200", "200a:aa8::8a2e:370:1337"] - - text, color, error = service_info_as_text_str(FakeInfo(FAKESERVER, fake_addresses)) - server, address, prod_line, dest_ip, lidar_port, imu_port = text.split() - assert FAKESERVER == server - assert address == fake_addresses[0] - assert prod_line == 'fake_prod_line' - - -def test_service_info_as_text_str_4(monkeypatch): - """It 
should not set prod_line when the HTTP response does not contain it.""" - def mock_get(url): - class MockResponse: - def json(self): - return {} - return MockResponse() - def mock_connect(foo, bar): # NOQA - class MockSocket: - def __init__(self, af=None, sock_type=None): # NOQA - pass - - def connect_ex(self, *arg): - return 0 - - def close(self): - pass - return MockSocket() - - def mock_config(self, *arg): # NOQA - return None - - with monkeypatch.context() as m: - m.setattr(requests, "get", mock_get) - m.setattr(socket, "socket", mock_connect) - m.setattr(ouster.cli.plugins.discover, "_get_config", mock_config) - - fake_addresses = ["192.168.100.200", "200a:aa8::8a2e:370:1337"] - text, color, error = service_info_as_text_str(FakeInfo(FAKESERVER, fake_addresses)) - server, address, prod_line, dest_ip, lidar_port, imu_port = text.split() - assert FAKESERVER == server - assert address == fake_addresses[0] - assert prod_line == '-' - - -def test_ipv6_url(monkeypatch): - """It should produce the correct url""" - test_ipv6_address = "200a:aa8::8a2e:370:1337" - test_url = f"http://[{test_ipv6_address}]/api/v1/sensor/metadata/sensor_info" - - def mock_get(url): - class MockResponse: - def json(self): - assert test_url == url - return None - - return MockResponse() - - def mock_connect(foo, bar): # NOQA - class MockSocket: - def __init__(self, af=None, sock_type=None): # NOQA - pass - - def connect_ex(self, *arg): - return 0 - - def close(self): - pass - return MockSocket() - - def mock_config(self, *arg): # NOQA - return None - - with monkeypatch.context() as m: - m.setattr(requests, "get", mock_get) - m.setattr(socket, "socket", mock_connect) - m.setattr(ouster.cli.plugins.discover, "_get_config", mock_config) - - fake_addresses = [test_ipv6_address] - _, _, _ = service_info_as_text_str(FakeInfo(FAKESERVER, fake_addresses)) +import packaging # TODO remove +import importlib # TODO remove +from sys import version_info +import pytest +import asyncio +from zeroconf import InterfaceChoice, IPVersion +from zeroconf.asyncio import AsyncServiceInfo +from ouster.cli.plugins.discover import\ + parse_scope_id, format_hostname_for_url, \ + get_output_for_sensor, get_text_for_oserror, is_link_local_ipv6_address_and_missing_scope_id, \ + AsyncServiceDiscovery + + +def test_format_hostname_for_url(): + test_addr_ipv4 = '169.254.169.254' + test_addr_ipv6 = '200a:aa8::8a2e:370:1337' + test_hostname = "os-122247000785.local" + assert format_hostname_for_url(test_addr_ipv4) == test_addr_ipv4 + assert format_hostname_for_url(test_addr_ipv6) == f'[{test_addr_ipv6}]' + assert format_hostname_for_url(test_hostname) == test_hostname + + +def test_parse_scope_id(): + # It returns a tuple consisting of the ip address string and an optional integer + # representing the scope id (if present) + test_addr_ipv4 = '169.254.169.254' + test_addr_ipv6 = '200a:aa8::8a2e:370:1337' + assert parse_scope_id(test_addr_ipv6) == (test_addr_ipv6, None) + assert parse_scope_id(f"{test_addr_ipv6}%5") == (test_addr_ipv6, 5) + assert parse_scope_id(f"{test_addr_ipv6}%0") == (test_addr_ipv6, 0) + assert parse_scope_id(f"{test_addr_ipv6}%0") == (test_addr_ipv6, 0) + assert parse_scope_id(test_addr_ipv4) == (test_addr_ipv4, None) + # it raises a ValueError if the scope id is not an integer + with pytest.raises(ValueError): + parse_scope_id(f"{test_addr_ipv6}%invalid") + + +def test_is_link_local_ipv6_address_and_missing_scope_id(): + with pytest.raises(ValueError): + is_link_local_ipv6_address_and_missing_scope_id("notanaddress") + assert not 
is_link_local_ipv6_address_and_missing_scope_id("10.34.80.17") + assert not is_link_local_ipv6_address_and_missing_scope_id("200a:aa8::8a2e:370:1337") + assert is_link_local_ipv6_address_and_missing_scope_id("fe80:aa8::8a2e:370:1337") + assert not is_link_local_ipv6_address_and_missing_scope_id("fe80:aa8::8a2e:370:1337%2") + + +def test_text_output(): + sensor_json = { + "active_config": { + "udp_dest": "10.34.80.17", + "udp_port_imu": 9503, + "udp_port_lidar": 9003, + }, + "addresses": [ + "10.34.26.98" + ], + "hostname": "os-992343000025.local.", + "network": { + "hostname": "os-992343000025", + "ipv4": { + "addr": "10.34.26.98/24", + "link_local": "169.254.67.1/16", + "override": None + }, + "ipv6": { + "link_local": "fe80::be0f:a7ff:fe00:a992/64" + }, + }, + "sensor_info": { + "image_rev": "ousteros-image-prod-aries-v2.5.2+20230714195410", + "prod_line": "OS-2-128", + "prod_sn": "992343000025", + } + } + text, color = get_output_for_sensor(sensor_json) + assert color == 'white' + assert "OS-2-128 - 992343000025" in text, text + assert "* UDP destination address: 10.34.80.17" in text, text + assert "* IPv4 DHCP 10.34.26.98/24" in text + + +# TODO: remove +def get_text_for_oserror2(error_prefix: str, address: str, e: Exception) -> str: + if "invalid argument" in str(e).lower() and is_link_local_ipv6_address_and_missing_scope_id(address): + zeroconf_version = packaging.version.parse(importlib.metadata.version('zeroconf')) + if version_info < (3, 9): + return f"{error_prefix} - this version of Python does not support scoped \ +link-local IPv6 addresses, which are necessary to retrieve the sensor configuration." + elif zeroconf_version < packaging.version.parse('0.131.0'): + return f"{error_prefix} - the installed version of zeroconf ({zeroconf_version}) \ +may not be able to provide scoped link-local IPv6 addresses, \ +which are necessary to retrieve the sensor configuration.\n" \ + + "Please refer to this GitHub pull request for specifics: \ +https://github.com/python-zeroconf/python-zeroconf/pull/1322" + else: + return f"{error_prefix} - {e}" + else: + return f"{error_prefix} - {e}" + + +def test_get_text_for_oserror(): + from requests.exceptions import ConnectionError + e = ConnectionError("Invalid Argument") + address = "fe80::be0f:a7ff:fe00:a992" + assert "invalid argument" in str(e).lower() + assert is_link_local_ipv6_address_and_missing_scope_id(address) + txt = get_text_for_oserror("prefix", address, e) + txt2 = get_text_for_oserror2("prefix", address, e) + assert txt == txt2 + from sys import version_info # TODO: monkeypatch this or make it a fn parameter + if version_info < (3, 9): + assert "this version of Python does not support scoped link-local IPv6 addresses" in txt + else: + assert txt == "prefix - Invalid Argument" + + +async def create_future_task_for_info(asd): + """a coroutine that creates an AsyncServiceInfo + and submits it to the provided AsyncServiceDiscovery.""" + while not asd.aiozc: + await asyncio.sleep(0.5) + service_type = '_http._tcp.local.' + name = 'bogus_name.local.' + info = AsyncServiceInfo(service_type, name) + info.server = 'server.local.' 
+ await asyncio.sleep(1) + await asd.create_future_task_for_info(info) + + +@pytest.mark.asyncio +async def test_fleetsw_5814(): + """it doesn't raise a RuntimeError due to a future being submitted after executor shutdown""" + timeout = 0.1 + continuous = False + show_user_data = False + asd = AsyncServiceDiscovery(InterfaceChoice.All, IPVersion.All, 'json', timeout, continuous, show_user_data, []) + asd.async_request_timeout_ms = 0 + await asyncio.gather(asd.async_run(), create_future_task_for_info(asd)) diff --git a/python/tests/test_extended_profiles.py b/python/tests/test_extended_profiles.py index d15f4b7a..78d9c89d 100644 --- a/python/tests/test_extended_profiles.py +++ b/python/tests/test_extended_profiles.py @@ -5,9 +5,9 @@ import pytest import numpy as np -from ouster import client -from ouster.client import _client -from ouster.client._client import (ChanField, FieldInfo) +from ouster.sdk import client +from ouster.sdk.client import _client +from ouster.sdk.client._client import (ChanField, FieldInfo) def test_create_field_info() -> None: diff --git a/python/tests/test_forward_slicer.py b/python/tests/test_forward_slicer.py new file mode 100644 index 00000000..e9eb19b7 --- /dev/null +++ b/python/tests/test_forward_slicer.py @@ -0,0 +1,105 @@ +import pytest +from ouster.sdk.util.forward_slicer import ForwardSlicer + + +class ReferenceSliceable: + def __init__(self, data): + self.data = data + + def __len__(self): + return len(self.data) + + def __getitem__(self, key): + return self.data[key] + + +class NormalizedSliceable: + + def __init__(self, data): + self.data = data + + def __len__(self): + return len(self.data) + + def __getitem__(self, key): + if isinstance(key, slice): + k = ForwardSlicer.normalize(key, len(self)) + result = ForwardSlicer.slice(iter(self.data[k.start:k.stop]), k) + return result if k.step > 0 else list(reversed(result)) + else: + return self.data[key] + + +@pytest.fixture +def sliceable_fixture(): + values = [i for i in range(10)] + ref_sliceable = ReferenceSliceable(values) + test_sliceable = NormalizedSliceable(values) + return ref_sliceable, test_sliceable + + +@pytest.mark.parametrize("start, stop, step", [ + (None, None, None), + (None, 8, None), + (3, None, None), + (3, 8, None), + (3, 8, 1), + (3, 8, 2), + (3, 8, 3), + (0, 8, 1), + (3, 10, 1), + (0, 10, 1), + (0, 10, 2), + (0, 10, 3), + (0, 10, 5), + (3, 11, 1), + (3, -1, 1), + (3, -2, 1), + (-7, 8, 1), + (-7, -2, 1), + (8, 3, 1), # invalid range for positive step + (3, 8, -1), # invalid range for negative step + (8, 3, -1), + (8, 3, -2), + (8, 3, -3), + (8, 3, -4), + (5, 3, -3), + (7, 3, -2), + (8, 0, -1), + (10, 3, -1), + (10, 0, -1), + (11, 3, -1), + (-1, 3, -1), + (-2, 3, -1), + (8, -7, -1), + (-2, -7, -1), + (-1, -7, -1), + (-1, 0, -1), + (-1, 0, -2), + (-1, 0, -3), + (9, 0, -1), + (9, 0, -2), + (9, 0, -3), + (10, 0, -1), + (10, 0, -2), + (10, 0, -3), + (11, 0, -1), + (11, 0, -2), + (11, 0, -3), + (9, None, -1), + (9, None, -2), + (9, None, -3), + (None, 3, -1), + (None, 3, -2), + (None, 3, -3), + (None, 0, -1), + (None, 0, -2), + (None, 0, -3), + (None, None, -1), + (None, None, -2), + (None, None, -3), +]) +def test_sliceable(sliceable_fixture, start, stop, step): + ref_sliceable, test_sliceable = sliceable_fixture + ss = slice(start, stop, step) + assert ref_sliceable[ss] == test_sliceable[ss], f"Failed test case with the slice [{start}:{stop}:{step}]" diff --git a/python/tests/test_http_client.py b/python/tests/test_http_client.py index a6b702b6..ca92ed0c 100644 --- 
a/python/tests/test_http_client.py +++ b/python/tests/test_http_client.py @@ -6,7 +6,7 @@ import pytest import flask from werkzeug.serving import make_server -from ouster.client._client import Client +from ouster.sdk.client._client import Client class ServerThread(threading.Thread): diff --git a/python/tests/test_metadata.py b/python/tests/test_metadata.py index dfa73893..3857f327 100644 --- a/python/tests/test_metadata.py +++ b/python/tests/test_metadata.py @@ -12,7 +12,7 @@ from pathlib import Path from os import path -from ouster import client +from ouster.sdk import client from tests.conftest import METADATA_DATA_DIR, PCAPS_DATA_DIR diff --git a/python/tests/test_open_source.py b/python/tests/test_open_source.py new file mode 100644 index 00000000..fbe6d74b --- /dev/null +++ b/python/tests/test_open_source.py @@ -0,0 +1,61 @@ +import os +from re import escape +import pytest +import tempfile +import ouster.sdk.io_type +from ouster.sdk.util import resolve_metadata_multi +from ouster.sdk import open_source +from tests.conftest import PCAPS_DATA_DIR + + +def test_open_source_empty_source_url(): + """It should raise an error if the src url is the empty string.""" + with pytest.raises(ValueError, match="No valid source specified"): + open_source('') + + +def test_open_source_multi_source_url(): + """It should raise an error if the src url contains commas. + """ + with pytest.raises(NotImplementedError, match="providing more than a single url is current not supported!"): + open_source('a,b') + + +def test_open_source_unsupported_source_type(): + """It raises a NotImplementedError if the source type is not supported.""" + with tempfile.NamedTemporaryFile(suffix='.csv') as f: + with pytest.raises(NotImplementedError, match="The io_type:OusterIoType.CSV is not supported!"): + open_source(f.name) + + +def test_open_source_undetermined_source_type(): + """It raises a RuntimeError if the source type couldn't be determined.""" + with pytest.raises(RuntimeError, match=escape("Failed to create scan_source for url ['unknown source']\n " + "more details: Source type expected to be a sensor hostname, ip address, or a .pcap, .osf, or .bag file.")): + open_source('unknown source') + + +def test_open_source_meta_not_supported(monkeypatch): + """It raises a RuntimeError if the meta keyword is provided to an unsupported source type.""" + with pytest.raises(RuntimeError, match="SensorScanSource does not support user-supplied metadata."): + # monkeypatch io_type to return a OusterIoType.SENSOR + with monkeypatch.context() as m: + m.setattr(ouster.sdk.io_type, "io_type", lambda _: ouster.sdk.io_type.OusterIoType.SENSOR) + open_source('fakesensor', meta='fake_meta.json') + + with pytest.raises(RuntimeError, match="OsfScanSource does not support user-supplied metadata."): + # monkeypatch io_type to return a OusterIoType.SENSOR + with monkeypatch.context() as m: + m.setattr(ouster.sdk.io_type, "io_type", lambda _: ouster.sdk.io_type.OusterIoType.OSF) + open_source('fake.osf', meta='fake_meta.json') + + +def test_open_source_meta_pcap(): + """Providing the meta parameter to open source should override the metadata files used by the PcapScanSource.""" + pcap_file_path = os.path.join(PCAPS_DATA_DIR, 'VLI-16-one-packet.pcap') + json_file_path = os.path.join(PCAPS_DATA_DIR, 'OS-0-128-U1_v2.3.0_1024x10.json') + + # the test file is different than what would be resolved ordinarily + assert json_file_path not in resolve_metadata_multi(pcap_file_path) + src = open_source(pcap_file_path, meta=(json_file_path,)) + assert 
src._metadata_paths == [json_file_path] diff --git a/python/tests/test_packet_iter.py b/python/tests/test_packet_iter.py index 91b515fc..48d8a71f 100644 --- a/python/tests/test_packet_iter.py +++ b/python/tests/test_packet_iter.py @@ -5,9 +5,10 @@ import pytest -from ouster.client import SensorInfo -from ouster.pcap import Pcap -from ouster.sdkx import packet_iter +from ouster.sdk.client import SensorInfo +from ouster.sdk.pcap import PcapMultiPacketReader +from ouster.sdk.pcap import Pcap +from ouster.sdk.pcap import packet_iter from tests.conftest import PCAPS_DATA_DIR @@ -113,14 +114,14 @@ def test_recording_packet_source(tmp_path) -> None: """It writes packets contained in the source to the output directory.""" meta_file_path = os.path.join(PCAPS_DATA_DIR, 'OS-0-128-U1_v2.3.0_1024x10.json') pcap_file_path = os.path.join(PCAPS_DATA_DIR, 'OS-0-128-U1_v2.3.0_1024x10.pcap') - sensor_info = SensorInfo(open(meta_file_path).read()) - source = Pcap(pcap_file_path, sensor_info) - recording_iter = packet_iter.RecordingPacketSource(source, tmp_path, n_frames=1) + source = PcapMultiPacketReader(pcap_file_path, [meta_file_path]) + recording_iter = packet_iter.RecordingPacketSource(source, str(tmp_path) + "/test", n_frames=1) emitted_packets = 0 - for packet in recording_iter: + for (idx, packet) in recording_iter: emitted_packets += 1 assert emitted_packets == 74 - source = Pcap(pcap_file_path, sensor_info) + + sensor_info = SensorInfo(open(meta_file_path).read()) assert len(os.listdir(tmp_path)) == 1 recording_path = os.path.join(tmp_path, os.listdir(tmp_path)[0]) recorded_pcap = Pcap(recording_path, sensor_info) @@ -137,11 +138,10 @@ def test_recording_packet_source_bad_packet_format(tmp_path) -> None: """ meta_file_path = os.path.join(PCAPS_DATA_DIR, 'OS-0-128-U1_v2.3.0_1024x10.json') pcap_file_path = os.path.join(PCAPS_DATA_DIR, 'VLI-16-one-packet.pcap') - sensor_info = SensorInfo(open(meta_file_path).read()) - sensor_info.udp_port_lidar = 2368 - source = Pcap(pcap_file_path, sensor_info, lidar_port = 2368) - recording_iter = packet_iter.RecordingPacketSource(source, tmp_path, n_frames=1) + source = PcapMultiPacketReader(pcap_file_path, [meta_file_path]) + source.metadata[0].udp_port_lidar = 2368 + recording_iter = packet_iter.RecordingPacketSource(source, str(tmp_path) + "/test", n_frames=1) emitted_packets = 0 - for packet in recording_iter: + for (idx, packet) in recording_iter: emitted_packets += 1 assert emitted_packets == 0 diff --git a/python/tests/test_parsing.py b/python/tests/test_parsing.py index 622cde40..c15d25f7 100644 --- a/python/tests/test_parsing.py +++ b/python/tests/test_parsing.py @@ -1,13 +1,13 @@ # type: ignore import os import numpy as np -import ouster.pcap._pcap as _pcap -from ouster.client import LidarMode, SensorInfo, UDPProfileLidar, ChanField -import ouster.client as client -import ouster.pcap as pcap -from ouster.sdkx.parsing import FusaDualFormat, PacketFormat, default_scan_fields +import ouster.sdk.pcap._pcap as _pcap +from ouster.sdk.client import LidarMode, SensorInfo, UDPProfileLidar, ChanField +import ouster.sdk.client as client +import ouster.sdk.pcap as pcap +from ouster.sdk.util import (FusaDualFormat, PacketFormat, default_scan_fields, + resolve_metadata) from tests.conftest import PCAPS_DATA_DIR -from ouster.sdk.util import resolve_metadata def test_fusa_parsing_profile(): diff --git a/python/tests/test_pcap.py b/python/tests/test_pcap.py index c2c0f0f8..c75432de 100644 --- a/python/tests/test_pcap.py +++ b/python/tests/test_pcap.py @@ -15,10 +15,9 @@ 
import pytest import time -from ouster import pcap -from ouster.pcap import _pcap -from ouster import client -from ouster.client import _client +from ouster.sdk import client, pcap, open_source +from ouster.sdk.pcap import _pcap +from ouster.sdk.client import _client from tests.conftest import PCAPS_DATA_DIR, TESTS from tests.test_batching import _patch_frame_id @@ -645,3 +644,12 @@ def test_legacy_reduced_json_data(): packet_source = pcap.Pcap(pcap_file_path, metadata) scans = client.Scans(packet_source) assert 1 == sum(1 for _ in scans) + + +def test_empty_pcap_loop(): + pcap_file_path = path.join(PCAPS_DATA_DIR, 'empty_pcap.pcap') + + source = open_source(pcap_file_path, cycle=True) + + with pytest.raises(StopIteration): + next(iter(source)) diff --git a/python/tests/test_plugins.py b/python/tests/test_plugins.py index 5a912e11..f76bba14 100644 --- a/python/tests/test_plugins.py +++ b/python/tests/test_plugins.py @@ -12,9 +12,7 @@ def test_find_plugins(capsys, has_mapping): built_in_plugins = [ 'ouster.cli.plugins.discover', - 'ouster.cli.plugins.io_type', 'ouster.cli.plugins.source', - 'ouster.cli.plugins.source_osf', 'ouster.cli.plugins.testing', 'ouster.cli.plugins.bad_plugin', ] diff --git a/python/tests/test_pose_util.py b/python/tests/test_pose_util.py index e49686ae..e47919cc 100644 --- a/python/tests/test_pose_util.py +++ b/python/tests/test_pose_util.py @@ -8,7 +8,7 @@ import pytest -import ouster.sdk.pose_util as pu +import ouster.sdk.util.pose_util as pu def gt_pose6toHomMatrix(vec: np.ndarray) -> np.ndarray: @@ -112,10 +112,10 @@ def test_traj_pose_interp(poses6: List[pu.Pose6]): print(f"Summary times for {len(ts)} poses calc:") print(f" pose_interp() ........ one by one : {t_pose_interp:.08f} s") print(f" traj_eval.pose_at() .. one by one : {t_pose_te_one_by_one:.08f} s " - f"({t_pose_interp/t_pose_te_one_by_one:.02f}x)") + f"({t_pose_interp / t_pose_te_one_by_one:.02f}x)") vec_speedup = "" if t_pose_te_vec > 0: - vec_speedup = f"({t_pose_te_one_by_one/t_pose_te_vec:.02f}x)" + vec_speedup = f"({t_pose_te_one_by_one / t_pose_te_vec:.02f}x)" else: vec_speedup = "(FTL)" # Faster Than Light print(f" traj_eval.poses_at() . 
vectorised : {t_pose_te_vec:.08f} s " diff --git a/python/tests/test_resolve_extrinsics.py b/python/tests/test_resolve_extrinsics.py new file mode 100644 index 00000000..4125c235 --- /dev/null +++ b/python/tests/test_resolve_extrinsics.py @@ -0,0 +1,112 @@ +from os import path +import numpy as np +from ouster.sdk.client import SensorInfo +from ouster.sdk.util import resolve_extrinsics +from ouster.sdk import open_source + +PCAP_WITH_NO_EXT_DATA_DIR = path.join(path.dirname( + path.abspath(__file__)), "../../tests/pcap_without_extrinsics") + +PCAP_PATH_WITH_NO_EXT = path.join( + PCAP_WITH_NO_EXT_DATA_DIR, "OS-0-128-U1_v2.3.0_10.pcap") + +PCAP_WITH_EXT_DATA_DIR = path.join(path.dirname( + path.abspath(__file__)), "../../tests/pcap_with_extrinsics") + +PCAP_PATH_WITH_EXT = path.join(PCAP_WITH_EXT_DATA_DIR, + "OS-0-128-U1_v2.3.0_10.pcap") +EXT_PATH = path.join(PCAP_WITH_EXT_DATA_DIR, "extrinsic_parameters.json") + + +sensor_names = ["122150000150", "992313000353", "992225001114"] + + +def test_resolve_extrinscs_with_no_extrinscs(): + extrinsics = resolve_extrinsics( + data_path=PCAP_PATH_WITH_NO_EXT, sensor_names=sensor_names) + assert len(extrinsics) == 0 + + +def test_resolve_extrinscs_with_sensor_names(): + extrinsics = resolve_extrinsics(data_path=PCAP_PATH_WITH_EXT) + assert len(extrinsics) == 0 + + extrinsics = resolve_extrinsics( + data_path=PCAP_PATH_WITH_EXT, sensor_names=[sensor_names[0]]) + assert len(extrinsics) == 1 + assert extrinsics[0][0].shape == (4, 4) + assert extrinsics[0][1] == EXT_PATH + + extrinsics = resolve_extrinsics( + data_path=PCAP_PATH_WITH_EXT, sensor_names=sensor_names) + assert len(extrinsics) == 3 + for ext, src in extrinsics: + assert ext.shape == (4, 4) + assert src == EXT_PATH + + +def test_resolve_extrinscs_with_sensor_infos(): + + sensor_infos = [SensorInfo()] * len(sensor_names) + for si, sn in zip(sensor_infos, sensor_names): + si.sn = sn + + extrinsics = resolve_extrinsics(data_path=PCAP_PATH_WITH_EXT) + assert len(extrinsics) == 0 + + extrinsics = resolve_extrinsics( + data_path=PCAP_PATH_WITH_EXT, infos=[sensor_infos[0]]) + assert len(extrinsics) == 1 + assert extrinsics[0][0].shape == (4, 4) + assert extrinsics[0][1] == EXT_PATH + + extrinsics = resolve_extrinsics( + data_path=PCAP_PATH_WITH_EXT, infos=sensor_infos) + assert len(extrinsics) == 3 + for ext, src in extrinsics: + assert ext.shape == (4, 4) + assert src == EXT_PATH + + +def test_resolve_extrinscs_using_dir(): + + extrinsics = resolve_extrinsics(data_path=PCAP_WITH_EXT_DATA_DIR) + assert len(extrinsics) == 0 + + extrinsics = resolve_extrinsics( + data_path=PCAP_WITH_EXT_DATA_DIR, sensor_names=[sensor_names[0]]) + assert len(extrinsics) == 1 + assert extrinsics[0][0].shape == (4, 4) + assert extrinsics[0][1] == EXT_PATH + + extrinsics = resolve_extrinsics( + data_path=PCAP_WITH_EXT_DATA_DIR, sensor_names=sensor_names) + assert len(extrinsics) == 3 + for ext, src in extrinsics: + assert ext.shape == (4, 4) + assert src == EXT_PATH + + +def test_open_source_with_file_that_has_no_valid_extrinscs(): + ss = open_source(source_url=PCAP_PATH_WITH_NO_EXT, sensor_idx=0) + np.testing.assert_array_equal(ss.metadata.extrinsic, np.eye(4)) + + +def test_open_source_with_file_that_has_no_valid_extrinscs_but_supply_array(): + ss = open_source(source_url=PCAP_PATH_WITH_NO_EXT, + sensor_idx=0, extrinsics=np.ones((4, 4))) + np.testing.assert_array_equal(ss.metadata.extrinsic, np.ones((4, 4))) + + +def test_open_source_with_file_that_has_no_valid_extrinscs_but_supply_extrinscs_path(): + ss = 
open_source(source_url=PCAP_PATH_WITH_NO_EXT, + sensor_idx=0, extrinsics=EXT_PATH) + array_cmp = ss.metadata.extrinsic != np.eye(4) + assert array_cmp.any() + + +def test_open_source_with_file_that_has_valid_extrinscs(): + ss = open_source(source_url=PCAP_PATH_WITH_EXT, + sensor_idx=0) + array_cmp = ss.metadata.extrinsic != np.eye(4) + assert array_cmp.any() diff --git a/python/tests/test_sdk_utils.py b/python/tests/test_sdk_utils.py new file mode 100644 index 00000000..cd9d71ff --- /dev/null +++ b/python/tests/test_sdk_utils.py @@ -0,0 +1,89 @@ +import pytest +import tempfile +from os.path import commonprefix +from pathlib import Path +from ouster.sdk.util.metadata import resolve_metadata, \ + resolve_metadata_multi, data_must_be_a_file_err, meta_must_be_a_file_err + + +def test_resolve_metadata_when_data_not_a_file(): + """It should raise an exception if the data path is not a file""" + with pytest.raises(ValueError, match=data_must_be_a_file_err): + resolve_metadata('') + + +def test_resolve_metadata_when_data_not_a_file_2(): + """It should raise an exception if the data path is not a file""" + with pytest.raises(ValueError, match=data_must_be_a_file_err): + with tempfile.TemporaryDirectory() as directory: + resolve_metadata(directory) + + +def test_resolve_metadata_when_metadata_path_provided_is_not_a_file(): + """It should raise an exception if the provided metadata path is + not a file.""" + with pytest.raises(ValueError, match=meta_must_be_a_file_err): + with tempfile.NamedTemporaryFile() as f: + with tempfile.TemporaryDirectory() as directory: + resolve_metadata(f.name, directory) + + +def test_resolve_metadata_min_prefix(): + """When there is no JSON file that has a common prefix with the data file, + resolve_metadata should return None.""" + with tempfile.TemporaryDirectory() as directory: + dir_path = Path(directory) + # create some test files (one representing data, one metadata) + + test_data_filename = 'foo' + test_meta_filename = 'tmpfile' + assert not commonprefix([test_data_filename, test_meta_filename]) + open(dir_path / test_data_filename, 'a').close() + open(dir_path / f'{test_meta_filename}.json', 'a').close() + + assert resolve_metadata(dir_path / test_data_filename) is None + + +def test_resolve_metadata_min_prefix_2(): + """The minimum common prefix between a data path and a resolved metadata + file should have a length greater than zero.""" + with tempfile.TemporaryDirectory() as directory: + dir_path = Path(directory) + # create some test files (one representing data, one metadata) + test_filename = 'tmpfile' + open(dir_path / test_filename, 'a').close() + open(dir_path / f'{test_filename}.json', 'a').close() + + assert resolve_metadata(dir_path / test_filename) == str(dir_path / f'{test_filename}.json') + + +def test_resolve_metadata_multi(): + """It should return an empty list if no JSON files match the data path.""" + with tempfile.TemporaryDirectory() as directory: + dir_path = Path(directory) + # create some test files (one representing data, one metadata) + + test_data_filename = 'foo' + test_meta_filename = 'tmpfile' + assert not commonprefix([test_data_filename, test_meta_filename]) + open(dir_path / test_data_filename, 'a').close() + open(dir_path / f'{test_meta_filename}.json', 'a').close() + + assert resolve_metadata_multi(dir_path / test_data_filename) == [] + + +def test_resolve_metadata_multi_2(): + """It should only return files that exist and share a prefix with the data file.""" + with tempfile.TemporaryDirectory() as directory: + dir_path = 
Path(directory) + # create some test files (one representing data, one metadata) + + test_data_filename = 'tmpfile' + test_meta_filename = 'tmpfile' + open(dir_path / test_data_filename, 'a').close() + open(dir_path / f'{test_meta_filename}.json', 'a').close() + open(dir_path / f'{test_meta_filename}.2.json', 'a').close() + + assert set(resolve_metadata_multi(dir_path / test_data_filename)) == set([ + str(Path(dir_path) / f'{test_meta_filename}.json'), str(Path(dir_path) / f'{test_meta_filename}.2.json') + ]) diff --git a/python/tests/test_single_scan_source.py b/python/tests/test_single_scan_source.py new file mode 100644 index 00000000..b8e1b814 --- /dev/null +++ b/python/tests/test_single_scan_source.py @@ -0,0 +1,64 @@ +""" +Copyright (c) 2024, Ouster, Inc. +All rights reserved. + +This module tests the compatibility between the ScanSource interface obtained +from a MultiScanSource reduced using single_source() and the original/legacy +ScanSource interfaces based of the concrete implementations Scans/osf.Scans +""" + +import os + +from tests.conftest import PCAPS_DATA_DIR, OSFS_DATA_DIR + + +def test_single_scan_source_pcap() -> None: + file_path = os.path.join(PCAPS_DATA_DIR, 'OS-0-128-U1_v2.3.0_1024x10.pcap') + + # old interface + from ouster.sdk import client, pcap + from ouster.sdk.util import resolve_metadata + + meta_path = resolve_metadata(file_path) + assert meta_path + meta = client.SensorInfo(open(meta_path).read()) + pcap_source = pcap.Pcap(file_path, meta) + scans = client.Scans(pcap_source) + + from ouster.sdk.client import MultiScanSource + from ouster.sdk.pcap import PcapScanSource # type: ignore + + # new interface + scan_source: MultiScanSource + scan_source = PcapScanSource(file_path, cycle=False) + ss = scan_source.single_source(0) + + ref_ids = [s.frame_id for s in scans] + upd_ids = [s.frame_id for s in ss] + + assert len(ref_ids) == len(upd_ids) + assert ref_ids == upd_ids + + +def test_single_scan_source_osf() -> None: + file_path = os.path.join(OSFS_DATA_DIR, "OS-1-128_v2.3.0_1024x10_lb_n3.osf") + + # old interface + import ouster.sdk.osf as osf + + scans: osf.Scans + scans = osf.Scans(file_path, cycle=False, sensor_id=0) + + # new interface + from ouster.sdk.client import MultiScanSource + from ouster.sdk.osf import OsfScanSource + + scan_source: MultiScanSource + scan_source = OsfScanSource(file_path, cycle=False) + ss = scan_source.single_source(0) + + ref_ids = [s.frame_id for s in scans] + upd_ids = [s.frame_id for s in ss] # type: ignore + + assert len(ref_ids) == len(upd_ids) + assert ref_ids == upd_ids diff --git a/python/tests/test_viz.py b/python/tests/test_viz.py index 2c667d89..ddc2990f 100644 --- a/python/tests/test_viz.py +++ b/python/tests/test_viz.py @@ -11,14 +11,14 @@ import random -from ouster import client +from ouster.sdk import client # test env may not have opengl, but all test modules are imported during # collection. 
Import is still needed to typecheck if TYPE_CHECKING: - import ouster.viz as viz + import ouster.sdk.viz as viz else: - viz = pytest.importorskip('ouster.viz') + viz = pytest.importorskip('ouster.sdk.viz') # mark all tests in this module so they only run with the --interactive flag pytestmark = pytest.mark.interactive @@ -480,6 +480,15 @@ def test_point_viz_destruction() -> None: assert ref() is None +def test_palette_lengths(meta: client.SensorInfo, + scan: client.LidarScan) -> None: + """Check that LidarScanViz has matching palette lengths.""" + point_viz = viz.PointViz("Test Viz") + scan_viz = viz.LidarScanViz(meta, point_viz) + + assert len(scan_viz._refl_cloud_palettes) == len(scan_viz._cloud_palettes) + + @pytest.mark.parametrize('test_key', ['single-2.3']) def test_scan_viz_destruction(meta: client.SensorInfo, point_viz: viz.PointViz) -> None: diff --git a/python/tests/test_viz_utils.py b/python/tests/test_viz_utils.py index aaf620c9..9c8ded2c 100644 --- a/python/tests/test_viz_utils.py +++ b/python/tests/test_viz_utils.py @@ -11,13 +11,13 @@ import math -from ouster import client -import ouster.pcap as pcap +from ouster.sdk import client +import ouster.sdk.pcap as pcap from ouster.sdk.util import resolve_metadata -import ouster.sdk.pose_util as pu +import ouster.sdk.util.pose_util as pu -from ouster.viz import grey_palette -from ouster.viz.scans_accum import ScansAccumulator +from ouster.sdk.viz import grey_palette +from ouster.sdk.viz.scans_accum import ScansAccumulator try: from scipy.spatial.transform import Rotation as R @@ -30,9 +30,9 @@ # test env may not have opengl, but all test modules are imported during # collection. Import is still needed to typecheck if TYPE_CHECKING: - import ouster.viz as viz + import ouster.sdk.viz as viz else: - viz = pytest.importorskip('ouster.viz') + viz = pytest.importorskip('ouster.sdk.viz') # Loose vector type Vector = Union[List, Tuple, np.ndarray] diff --git a/python/tests/test_xyzlut.py b/python/tests/test_xyzlut.py index 9baea467..640c13eb 100644 --- a/python/tests/test_xyzlut.py +++ b/python/tests/test_xyzlut.py @@ -8,8 +8,8 @@ import numpy as np import pytest -from ouster import client -import ouster.client._digest as digest +from ouster.sdk import client +import ouster.sdk.client._digest as digest from ouster.sdk.examples import reference diff --git a/python/tox.ini b/python/tox.ini index 29e61781..4d6bab84 100644 --- a/python/tox.ini +++ b/python/tox.ini @@ -20,14 +20,13 @@ commands = [testenv:py{37,38,39,310,311}-use_wheels] description = installs ouster-sdk-python from wheels and runs tests -passenv = WHEELS_DIR, PIP_CACHE_DIR_BASE +passenv = WHEELS_DIR skipsdist = true skip_install = true parallel_show_output = true commands = - pip install --cache-dir={env:PIP_CACHE_DIR_BASE}/{envname} --force-reinstall --upgrade --pre -f {env:WHEELS_DIR} --no-index --no-cache-dir ouster-sdk[test] + pip install --force-reinstall --upgrade --pre -f {env:WHEELS_DIR} --no-index --no-cache-dir ouster-sdk[test] pytest tests/ -o junit_suite_name="ouster-sdk-{env:ID}-{env:VERSION_ID}-{envname}" \ - -o cache_dir="{env:PIP_CACHE_DIR_BASE}/{envname}" \ --junit-prefix="{env:ID}__{env:VERSION_ID}__{envname}" \ --junitxml="{env:ARTIFACT_DIR}/tox-tests/ouster-sdk-{env:ID}-{env:VERSION_ID}-{envname}.xml" diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt index be3da24e..faf7405a 100644 --- a/tests/CMakeLists.txt +++ b/tests/CMakeLists.txt @@ -7,12 +7,14 @@ endif() find_package(GTest REQUIRED) find_package(jsoncpp REQUIRED) +include(Coverage) 
add_executable(bcompat_meta_json_test bcompat_meta_json_test.cpp) -target_link_libraries(bcompat_meta_json_test +target_link_libraries(bcompat_meta_json_test PRIVATE OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(bcompat_meta_json_test) add_test(NAME bcompat_meta_json_test COMMAND bcompat_meta_json_test --gtest_output=xml:bcompat_meta_json_test.xml) @@ -29,6 +31,8 @@ target_link_libraries(metadata_test OusterSDK::ouster_client GTest::gtest GTest::gtest_main PRIVATE jsoncpp_lib) +CodeCoverageFunctionality(metadata_test) + add_test(NAME metadata_test COMMAND metadata_test --gtest_output=xml:metadata_test.xml) set_tests_properties( metadata_test @@ -40,7 +44,8 @@ set_tests_properties( add_executable(lidar_scan_test lidar_scan_test.cpp) -target_link_libraries(lidar_scan_test OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +target_link_libraries(lidar_scan_test PRIVATE OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(lidar_scan_test) add_test(NAME lidar_scan_test COMMAND lidar_scan_test --gtest_output=xml:lidar_scan_test.xml) set_tests_properties( @@ -52,7 +57,8 @@ set_tests_properties( add_executable(cartesian_test cartesian_test.cpp util.h) -target_link_libraries(cartesian_test OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +target_link_libraries(cartesian_test PRIVATE OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(cartesian_test) add_test(NAME cartesian_test COMMAND cartesian_test --gtest_output=xml:cartesian_test.xml) set_tests_properties( @@ -64,7 +70,8 @@ set_tests_properties( add_executable(metadata_errors_test metadata_errors_test.cpp) -target_link_libraries(metadata_errors_test OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +target_link_libraries(metadata_errors_test PRIVATE OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(metadata_errors_test) add_test(NAME metadata_errors_test COMMAND metadata_errors_test --gtest_output=xml:metadata_errors_test.xml) set_tests_properties( @@ -76,7 +83,8 @@ set_tests_properties( add_executable(pcap_test pcap_test.cpp) -target_link_libraries(pcap_test OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +target_link_libraries(pcap_test PRIVATE OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(pcap_test) add_test(NAME pcap_test COMMAND pcap_test --gtest_output=xml:pcap_test.xml) set_tests_properties( @@ -88,13 +96,15 @@ set_tests_properties( add_executable(profile_extension_test profile_extension_test.cpp) -target_link_libraries(profile_extension_test OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +target_link_libraries(profile_extension_test PRIVATE OusterSDK::ouster_client GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(profile_extension_test) add_test(NAME profile_extension_test COMMAND profile_extension_test --gtest_output=xml:profile_extension_test.xml) add_executable(fusa_profile_test fusa_profile_test.cpp) -target_link_libraries(fusa_profile_test OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +target_link_libraries(fusa_profile_test PRIVATE OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(fusa_profile_test) add_test(NAME fusa_profile_test COMMAND fusa_profile_test --gtest_output=xml:fusa_profile_test.xml) set_tests_properties( @@ -106,7 +116,8 @@ set_tests_properties( add_executable(parsing_benchmark_test parsing_benchmark_test.cpp 
util.h) -target_link_libraries(parsing_benchmark_test OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +target_link_libraries(parsing_benchmark_test PRIVATE OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(parsing_benchmark_test) add_test(NAME parsing_benchmark_test COMMAND parsing_benchmark_test --gtest_output=xml:parsing_benchmark_test.xml) @@ -119,7 +130,8 @@ set_tests_properties( add_executable(scan_batcher_test scan_batcher_test.cpp util.h) -target_link_libraries(scan_batcher_test OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +target_link_libraries(scan_batcher_test PRIVATE OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(scan_batcher_test) add_test(NAME scan_batcher_test COMMAND scan_batcher_test --gtest_output=xml:parsing_benchmark_test.xml) @@ -132,7 +144,8 @@ set_tests_properties( add_executable(packet_writer_test packet_writer_test.cpp util.h) -target_link_libraries(packet_writer_test OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +target_link_libraries(packet_writer_test PRIVATE OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(packet_writer_test) add_test(NAME packet_writer_test COMMAND packet_writer_test --gtest_output=xml:parsing_benchmark_test.xml) @@ -142,3 +155,17 @@ set_tests_properties( ENVIRONMENT DATA_DIR=${CMAKE_CURRENT_LIST_DIR}/pcaps/ ) + +add_executable(udp_queue_test udp_queue_test.cpp) + +target_link_libraries(udp_queue_test PRIVATE OusterSDK::ouster_client OusterSDK::ouster_pcap GTest::gtest GTest::gtest_main) +CodeCoverageFunctionality(udp_queue_test) + +add_test(NAME udp_queue_test COMMAND udp_queue_test --gtest_output=xml:udp_queue_test.xml) + +set_tests_properties( + udp_queue_test + PROPERTIES + ENVIRONMENT + DATA_DIR=${CMAKE_CURRENT_LIST_DIR}/pcaps/ +) diff --git a/tests/cartesian_test.cpp b/tests/cartesian_test.cpp index 7f57e1d5..9bf21186 100644 --- a/tests/cartesian_test.cpp +++ b/tests/cartesian_test.cpp @@ -40,8 +40,8 @@ INSTANTIATE_TEST_CASE_P(CartesianParametrisedTests, std::pair{4096, 128})); TEST(CartesianParametrisedTestFixture, CartesianFunctionsMatch) { - const auto WIDTH = 512; - const auto HEIGHT = 64; + const auto WIDTH = 256; + const auto HEIGHT = 32; const auto ROWS = WIDTH * HEIGHT; const auto COLS = 3; @@ -62,8 +62,8 @@ TEST(CartesianParametrisedTestFixture, CartesianFunctionsMatch) { } TEST(CartesianParametrisedTestFixture, CartesianFunctionsMatchF) { - const auto WIDTH = 512; - const auto HEIGHT = 64; + const auto WIDTH = 256; + const auto HEIGHT = 32; const auto ROWS = WIDTH * HEIGHT; const auto COLS = 3; @@ -113,8 +113,8 @@ TEST_P(CartesianParametrisedTestFixture, SpeedCheck) { PointsF pointsF = PointsF(ROWS, COLS); img_t range = img_t(WIDTH, HEIGHT); - constexpr int N_SCANS = 1000; - constexpr int MOVING_AVG_WINDOW = 100; + constexpr int N_SCANS = 100; + constexpr int MOVING_AVG_WINDOW = 30; using MovingAverage64 = MovingAverage; static std::map mv; diff --git a/tests/lidar_scan_test.cpp b/tests/lidar_scan_test.cpp index 8bc90a08..b002fffe 100644 --- a/tests/lidar_scan_test.cpp +++ b/tests/lidar_scan_test.cpp @@ -11,6 +11,7 @@ #include #include #include +#include #include #include #include @@ -443,3 +444,53 @@ TEST(LidarScan, packet_timestamp_3) { EXPECT_EQ(scan.packet_timestamp()[0], packet.host_timestamp); EXPECT_EQ(scan.packet_timestamp()[1], 0); } + 
+TEST(LidarScan, test_get_first_valid_packet_timestamp) { + int w = 1024; + int h = 32; + auto scan = ouster::LidarScan(w, h); + EXPECT_EQ(scan.packet_timestamp().rows(), w / DEFAULT_COLUMNS_PER_PACKET); + ASSERT_TRUE((scan.packet_timestamp() == 0).all()); + + auto packet_ts = scan.packet_timestamp(); + // fill in some default values + std::iota(packet_ts.begin(), packet_ts.end(), 1); + ASSERT_TRUE((packet_ts == scan.packet_timestamp()).all()); + + // no packet found + EXPECT_EQ(scan.get_first_valid_packet_timestamp(), 0); + + // first packet + scan.status()[1] = 1; + EXPECT_EQ(scan.get_first_valid_packet_timestamp(), 1); + + // fifth packet + scan.status()[1] = 0; + scan.status()[74] = 1; + EXPECT_EQ(scan.get_first_valid_packet_timestamp(), 5); + + scan.status()[74] = 0; + scan.status()[1023] = 1; + EXPECT_EQ(scan.get_first_valid_packet_timestamp(), 64); +} + +TEST(LidarScan, destagger) { + // It raises std::invalid_argument when the image height doesn't match the + // shift rows + int w = 32; + int h = 32; + auto scan = ouster::LidarScan(w, h); + std::vector shift_by_row; + const auto& range = scan.field(ChanField::RANGE); + EXPECT_THROW( + { + try { + ouster::destagger(range, shift_by_row); + } catch (const std::invalid_argument& e) { + ASSERT_STREQ(e.what(), + "image height does not match shifts size"); + throw; + } + }, + std::invalid_argument); +} diff --git a/tests/osfs/empty_osf.osf b/tests/osfs/empty_osf.osf new file mode 100644 index 00000000..8fe3d379 Binary files /dev/null and b/tests/osfs/empty_osf.osf differ diff --git a/tests/packet_writer_test.cpp b/tests/packet_writer_test.cpp index 87144d9a..0d6cdfc0 100644 --- a/tests/packet_writer_test.cpp +++ b/tests/packet_writer_test.cpp @@ -198,6 +198,14 @@ TEST_P(PacketWriterTest, packet_writer_headers_test) { pw.set_frame_id(p.buf.data(), 777); EXPECT_EQ(pf.frame_id(p.buf.data()), 777); + + if (profile != PROFILE_LIDAR_LEGACY) { + pw.set_init_id(p.buf.data(), 0x123456); + EXPECT_EQ(pf.init_id(p.buf.data()), 0x123456); + + pw.set_prod_sn(p.buf.data(), 0x1234567890); + EXPECT_EQ(pf.prod_sn(p.buf.data()), 0x1234567890); + } } TEST_P(PacketWriterTest, packet_writer_randomize_test) { @@ -258,12 +266,28 @@ TEST_P(PacketWriterTest, packet_writer_randomize_test) { }; ouster::impl::foreach_field(ls, verify_field); + auto g = std::mt19937(0xdeadbeef); + auto dinit_id = std::uniform_int_distribution(0, 0xFFFFFF); + auto dserial_no = std::uniform_int_distribution(0, 0xFFFFFFFFFF); + + uint32_t init_id = dinit_id(g); // 24 bits + uint64_t serial_no = dserial_no(g); // 40 bits + // produced and re-parsed packets should result in the same scan auto packets = std::vector{}; - ouster::impl::scan_to_packets(ls, pw, std::back_inserter(packets)); + ouster::impl::scan_to_packets(ls, pw, std::back_inserter(packets), init_id, + serial_no); ASSERT_EQ(packets.size(), 64); + // validate the init id and serial no in each packet if supported + if (profile != PROFILE_LIDAR_LEGACY) { + for (const auto& p : packets) { + ASSERT_EQ(init_id, pf.init_id(p.buf.data())); + ASSERT_EQ(serial_no, pf.prod_sn(p.buf.data())); + } + } + auto ls2 = LidarScan(columns_per_frame, pixels_per_column, profile, columns_per_packet); ScanBatcher batcher(columns_per_frame, pf); @@ -311,7 +335,8 @@ TEST_P(PacketWriterTest, scans_to_packets_skips_dropped_packets_test) { ouster::impl::foreach_field(ls, randomise); auto packets_orig = std::vector{}; - ouster::impl::scan_to_packets(ls, pw, std::back_inserter(packets_orig)); + ouster::impl::scan_to_packets(ls, pw, 
std::back_inserter(packets_orig), 0, + 0); ASSERT_EQ(packets_orig.size(), 64); @@ -332,8 +357,8 @@ TEST_P(PacketWriterTest, scans_to_packets_skips_dropped_packets_test) { } auto packets_repr = std::vector{}; - ouster::impl::scan_to_packets(ls_repr, pw, - std::back_inserter(packets_repr)); + ouster::impl::scan_to_packets(ls_repr, pw, std::back_inserter(packets_repr), + 0, 0); EXPECT_EQ(packets_repr.size(), 63); EXPECT_EQ(packets_repr[14].host_timestamp, 25); @@ -400,7 +425,8 @@ TEST_P(PacketWriterDataTest, packet_writer_data_repr_test) { // produced and re-parsed fields should match auto packets = std::vector{}; - ouster::impl::scan_to_packets(ls_orig, pw, std::back_inserter(packets)); + ouster::impl::scan_to_packets(ls_orig, pw, std::back_inserter(packets), 0, + 0); ASSERT_EQ(packets.size(), n_packets); auto ls_repr = @@ -440,7 +466,8 @@ TEST_P(PacketWriterDataTest, packet_writer_raw_headers_match_test) { // produced and re-parsed RAW_HEADERS fields should match auto packets = std::vector{}; - ouster::impl::scan_to_packets(rh_ls_orig, pw, std::back_inserter(packets)); + ouster::impl::scan_to_packets(rh_ls_orig, pw, std::back_inserter(packets), + 0, 0); ASSERT_EQ(packets.size(), n_packets); auto rh_ls_repr = diff --git a/tests/pcap_test.cpp b/tests/pcap_test.cpp index 71c939cc..172e9a76 100644 --- a/tests/pcap_test.cpp +++ b/tests/pcap_test.cpp @@ -122,8 +122,9 @@ TEST(IndexedPcapReader, constructor) { data_dir + "/OS-0-32-U1_v2.2.0_1024x10-single-packet.pcap"; std::string meta_filename = data_dir + "/OS-0-32-U1_v2.2.0_1024x10.json"; - IndexedPcapReader pcap(filename, - {meta_filename, meta_filename, meta_filename}); + IndexedPcapReader pcap( + filename, + std::vector{meta_filename, meta_filename, meta_filename}); EXPECT_EQ(pcap.index_.frame_indices_.size(), 3); EXPECT_EQ(pcap.previous_frame_ids_.size(), 3); } @@ -133,7 +134,7 @@ TEST(IndexedPcapReader, frame_count) { auto data_dir = getenvs("DATA_DIR"); std::string filename = data_dir + "/OS-0-32-U1_v2.2.0_1024x10-single-packet.pcap"; - IndexedPcapReader pcap(filename, {}); + IndexedPcapReader pcap(filename, std::vector{}); pcap.index_.frame_indices_.push_back(PcapIndex::frame_index()); pcap.index_.frame_indices_.at(0).push_back(0); @@ -147,7 +148,7 @@ TEST(IndexedPcapReader, seek_to_frame) { auto data_dir = getenvs("DATA_DIR"); std::string filename = data_dir + "/OS-2-128-U1_v2.3.0_1024x10.pcap"; std::string meta_filename = data_dir + "/OS-2-128-U1_v2.3.0_1024x10.json"; - IndexedPcapReader pcap(filename, {meta_filename}); + IndexedPcapReader pcap(filename, std::vector{meta_filename}); std::vector progress; while (pcap.next_packet()) { diff --git a/tests/pcap_with_extrinsics/OS-0-128-U1_v2.3.0_10.pcap b/tests/pcap_with_extrinsics/OS-0-128-U1_v2.3.0_10.pcap new file mode 100644 index 00000000..18ca30b5 Binary files /dev/null and b/tests/pcap_with_extrinsics/OS-0-128-U1_v2.3.0_10.pcap differ diff --git a/tests/pcap_with_extrinsics/OS-0-128-U1_v2.3.0_1024x10.json b/tests/pcap_with_extrinsics/OS-0-128-U1_v2.3.0_1024x10.json new file mode 100644 index 00000000..47bea351 --- /dev/null +++ b/tests/pcap_with_extrinsics/OS-0-128-U1_v2.3.0_1024x10.json @@ -0,0 +1,464 @@ +{ + "base_pn": "", + "base_sn": "", + "beam_altitude_angles": + [ + 45.75, + 44.7, + 43.93, + 43.47, + 42.72, + 41.68, + 40.94, + 40.46, + 39.7, + 38.69, + 37.95, + 37.47, + 36.71, + 35.72, + 35, + 34.51, + 33.74, + 32.78, + 32.06, + 31.56, + 30.8, + 29.85, + 29.13, + 28.62, + 27.86, + 26.93, + 26.23, + 25.7, + 24.93, + 24.03, + 23.33, + 22.79, + 22.03, + 21.16, + 20.46, + 19.91, + 
19.13, + 18.3, + 17.6, + 17.04, + 16.27, + 15.45, + 14.76, + 14.19, + 13.41, + 12.62, + 11.94, + 11.34, + 10.55, + 9.81, + 9.13, + 8.51, + 7.71, + 6.99, + 6.31, + 5.68, + 4.87, + 4.17, + 3.5, + 2.86, + 2.06, + 1.37, + 0.71, + 0.04, + -0.77, + -1.43, + -2.09, + -2.77, + -3.6, + -4.23, + -4.9, + -5.6, + -6.43, + -7.05, + -7.71, + -8.43, + -9.24, + -9.85, + -10.51, + -11.28, + -12.07, + -12.65, + -13.34, + -14.1, + -14.91, + -15.48, + -16.16, + -16.95, + -17.77, + -18.32, + -18.99, + -19.8, + -20.62, + -21.17, + -21.84, + -22.68, + -23.5, + -24.02, + -24.7, + -25.56, + -26.39, + -26.9, + -27.57, + -28.46, + -29.29, + -29.78, + -30.47, + -31.36, + -32.21, + -32.68, + -33.37, + -34.29, + -35.14, + -35.6, + -36.29, + -37.24, + -38.11, + -38.55, + -39.24, + -40.21, + -41.08, + -41.51, + -42.23, + -43.22, + -44.09, + -44.52, + -45.24, + -46.26 + ], + "beam_azimuth_angles": + [ + 11.24, + 3.93, + -3.29, + -10.34, + 10.81, + 3.75, + -3.18, + -10, + 10.41, + 3.61, + -3.1, + -9.7, + 10.09, + 3.49, + -3.02, + -9.45, + 9.81, + 3.39, + -2.96, + -9.23, + 9.56, + 3.3, + -2.91, + -9.05, + 9.35, + 3.21, + -2.87, + -8.88, + 9.15, + 3.14, + -2.83, + -8.76, + 8.99, + 3.06, + -2.8, + -8.63, + 8.85, + 3.02, + -2.79, + -8.55, + 8.73, + 2.96, + -2.77, + -8.49, + 8.64, + 2.93, + -2.77, + -8.41, + 8.55, + 2.89, + -2.76, + -8.38, + 8.5, + 2.86, + -2.77, + -8.37, + 8.45, + 2.83, + -2.77, + -8.35, + 8.41, + 2.82, + -2.77, + -8.35, + 8.39, + 2.8, + -2.8, + -8.39, + 8.39, + 2.79, + -2.82, + -8.43, + 8.4, + 2.8, + -2.83, + -8.47, + 8.42, + 2.79, + -2.87, + -8.55, + 8.46, + 2.79, + -2.92, + -8.63, + 8.51, + 2.78, + -2.96, + -8.73, + 8.58, + 2.8, + -3.01, + -8.86, + 8.66, + 2.83, + -3.07, + -9.01, + 8.78, + 2.85, + -3.14, + -9.17, + 8.9, + 2.88, + -3.22, + -9.35, + 9.05, + 2.91, + -3.31, + -9.57, + 9.23, + 2.95, + -3.41, + -9.83, + 9.44, + 3.01, + -3.51, + -10.12, + 9.7, + 3.08, + -3.64, + -10.47, + 9.99, + 3.15, + -3.8, + -10.88, + 10.34, + 3.24, + -3.98, + -11.35 + ], + "build_date": "2022-04-14T21:11:47Z", + "build_rev": "v2.3.0", + "client_version": "ouster_client 0.3.0", + "data_format": + { + "column_window": + [ + 0, + 1023 + ], + "columns_per_frame": 1024, + "columns_per_packet": 16, + "pixel_shift_by_row": + [ + 64, + 43, + 23, + 3, + 63, + 43, + 23, + 4, + 62, + 42, + 23, + 4, + 61, + 42, + 23, + 5, + 60, + 42, + 24, + 6, + 59, + 41, + 24, + 6, + 59, + 41, + 24, + 7, + 58, + 41, + 24, + 7, + 58, + 41, + 24, + 7, + 57, + 41, + 24, + 8, + 57, + 40, + 24, + 8, + 57, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 7, + 56, + 40, + 24, + 7, + 56, + 40, + 23, + 7, + 57, + 40, + 23, + 6, + 57, + 40, + 23, + 6, + 57, + 40, + 23, + 5, + 58, + 40, + 23, + 5, + 58, + 40, + 22, + 4, + 59, + 41, + 22, + 3, + 60, + 41, + 22, + 2, + 60, + 41, + 21, + 1, + 61, + 41, + 21, + 0 + ], + "pixels_per_column": 128, + "udp_profile_imu": "LEGACY", + "udp_profile_lidar": "RNG15_RFL8_NIR8" + }, + "hostname": "", + "image_rev": "ousteros-image-prod-aries-v2.3.0+20220415163956", + "imu_to_sensor_transform": + [ + 1, + 0, + 0, + 6.253, + 0, + 1, + 0, + -11.775, + 0, + 0, + 1, + 7.645, + 0, + 0, + 0, + 1 + ], + "initialization_id": 5431292, + "json_calibration_version": 4, + "lidar_mode": "1024x10", + "lidar_origin_to_beam_origin_mm": 27.67, + "lidar_to_sensor_transform": + [ + -1, + 0, + 0, + 0, + 0, + -1, + 0, + 0, + 0, + 0, + 1, + 36.18, + 0, + 0, + 0, + 1 + ], + 
"prod_line": "OS-0-128", + "prod_pn": "840-103574-06", + "prod_sn": "122150000150", + "proto_rev": "", + "status": "RUNNING", + "udp_port_imu": 7503, + "udp_port_lidar": 7502 +} \ No newline at end of file diff --git a/tests/pcap_with_extrinsics/extrinsic_parameters.json b/tests/pcap_with_extrinsics/extrinsic_parameters.json new file mode 100644 index 00000000..77bed700 --- /dev/null +++ b/tests/pcap_with_extrinsics/extrinsic_parameters.json @@ -0,0 +1,37 @@ +{ + "transforms": [ + { + "destination_frame": "world", + "p_x": 12.900038003921509, + "p_y": 54.88382339477539, + "p_z": 15.093940734863281, + "q_w": 0.7467111945152283, + "q_x": -0.14726592600345612, + "q_y": 0.10559403151273727, + "q_z": 0.6399883031845093, + "source_frame": "122150000150" + }, + { + "destination_frame": "world", + "p_x": 15.144094467163086, + "p_y": -2.7492141723632813, + "p_z": 13.969382286071777, + "q_w": 0.9823863506317139, + "q_x": 0.012154391966760159, + "q_y": 0.1813080757856369, + "q_z": 0.04355182126164436, + "source_frame": "992313000353" + }, + { + "destination_frame": "world", + "p_x": 2.866729736328125, + "p_y": 54.92229461669922, + "p_z": 14.699606895446777, + "q_w": 0.6737944483757019, + "q_x": -0.2551470398902893, + "q_y": 0.2224845290184021, + "q_z": 0.6568117141723633, + "source_frame": "992225001114" + } + ] +} \ No newline at end of file diff --git a/tests/pcap_without_extrinsics/OS-0-128-U1_v2.3.0_10.pcap b/tests/pcap_without_extrinsics/OS-0-128-U1_v2.3.0_10.pcap new file mode 100644 index 00000000..18ca30b5 Binary files /dev/null and b/tests/pcap_without_extrinsics/OS-0-128-U1_v2.3.0_10.pcap differ diff --git a/tests/pcap_without_extrinsics/OS-0-128-U1_v2.3.0_1024x10.json b/tests/pcap_without_extrinsics/OS-0-128-U1_v2.3.0_1024x10.json new file mode 100644 index 00000000..47bea351 --- /dev/null +++ b/tests/pcap_without_extrinsics/OS-0-128-U1_v2.3.0_1024x10.json @@ -0,0 +1,464 @@ +{ + "base_pn": "", + "base_sn": "", + "beam_altitude_angles": + [ + 45.75, + 44.7, + 43.93, + 43.47, + 42.72, + 41.68, + 40.94, + 40.46, + 39.7, + 38.69, + 37.95, + 37.47, + 36.71, + 35.72, + 35, + 34.51, + 33.74, + 32.78, + 32.06, + 31.56, + 30.8, + 29.85, + 29.13, + 28.62, + 27.86, + 26.93, + 26.23, + 25.7, + 24.93, + 24.03, + 23.33, + 22.79, + 22.03, + 21.16, + 20.46, + 19.91, + 19.13, + 18.3, + 17.6, + 17.04, + 16.27, + 15.45, + 14.76, + 14.19, + 13.41, + 12.62, + 11.94, + 11.34, + 10.55, + 9.81, + 9.13, + 8.51, + 7.71, + 6.99, + 6.31, + 5.68, + 4.87, + 4.17, + 3.5, + 2.86, + 2.06, + 1.37, + 0.71, + 0.04, + -0.77, + -1.43, + -2.09, + -2.77, + -3.6, + -4.23, + -4.9, + -5.6, + -6.43, + -7.05, + -7.71, + -8.43, + -9.24, + -9.85, + -10.51, + -11.28, + -12.07, + -12.65, + -13.34, + -14.1, + -14.91, + -15.48, + -16.16, + -16.95, + -17.77, + -18.32, + -18.99, + -19.8, + -20.62, + -21.17, + -21.84, + -22.68, + -23.5, + -24.02, + -24.7, + -25.56, + -26.39, + -26.9, + -27.57, + -28.46, + -29.29, + -29.78, + -30.47, + -31.36, + -32.21, + -32.68, + -33.37, + -34.29, + -35.14, + -35.6, + -36.29, + -37.24, + -38.11, + -38.55, + -39.24, + -40.21, + -41.08, + -41.51, + -42.23, + -43.22, + -44.09, + -44.52, + -45.24, + -46.26 + ], + "beam_azimuth_angles": + [ + 11.24, + 3.93, + -3.29, + -10.34, + 10.81, + 3.75, + -3.18, + -10, + 10.41, + 3.61, + -3.1, + -9.7, + 10.09, + 3.49, + -3.02, + -9.45, + 9.81, + 3.39, + -2.96, + -9.23, + 9.56, + 3.3, + -2.91, + -9.05, + 9.35, + 3.21, + -2.87, + -8.88, + 9.15, + 3.14, + -2.83, + -8.76, + 8.99, + 3.06, + -2.8, + -8.63, + 8.85, + 3.02, + -2.79, + -8.55, + 8.73, + 2.96, + -2.77, + -8.49, + 
8.64, + 2.93, + -2.77, + -8.41, + 8.55, + 2.89, + -2.76, + -8.38, + 8.5, + 2.86, + -2.77, + -8.37, + 8.45, + 2.83, + -2.77, + -8.35, + 8.41, + 2.82, + -2.77, + -8.35, + 8.39, + 2.8, + -2.8, + -8.39, + 8.39, + 2.79, + -2.82, + -8.43, + 8.4, + 2.8, + -2.83, + -8.47, + 8.42, + 2.79, + -2.87, + -8.55, + 8.46, + 2.79, + -2.92, + -8.63, + 8.51, + 2.78, + -2.96, + -8.73, + 8.58, + 2.8, + -3.01, + -8.86, + 8.66, + 2.83, + -3.07, + -9.01, + 8.78, + 2.85, + -3.14, + -9.17, + 8.9, + 2.88, + -3.22, + -9.35, + 9.05, + 2.91, + -3.31, + -9.57, + 9.23, + 2.95, + -3.41, + -9.83, + 9.44, + 3.01, + -3.51, + -10.12, + 9.7, + 3.08, + -3.64, + -10.47, + 9.99, + 3.15, + -3.8, + -10.88, + 10.34, + 3.24, + -3.98, + -11.35 + ], + "build_date": "2022-04-14T21:11:47Z", + "build_rev": "v2.3.0", + "client_version": "ouster_client 0.3.0", + "data_format": + { + "column_window": + [ + 0, + 1023 + ], + "columns_per_frame": 1024, + "columns_per_packet": 16, + "pixel_shift_by_row": + [ + 64, + 43, + 23, + 3, + 63, + 43, + 23, + 4, + 62, + 42, + 23, + 4, + 61, + 42, + 23, + 5, + 60, + 42, + 24, + 6, + 59, + 41, + 24, + 6, + 59, + 41, + 24, + 7, + 58, + 41, + 24, + 7, + 58, + 41, + 24, + 7, + 57, + 41, + 24, + 8, + 57, + 40, + 24, + 8, + 57, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 8, + 56, + 40, + 24, + 7, + 56, + 40, + 24, + 7, + 56, + 40, + 23, + 7, + 57, + 40, + 23, + 6, + 57, + 40, + 23, + 6, + 57, + 40, + 23, + 5, + 58, + 40, + 23, + 5, + 58, + 40, + 22, + 4, + 59, + 41, + 22, + 3, + 60, + 41, + 22, + 2, + 60, + 41, + 21, + 1, + 61, + 41, + 21, + 0 + ], + "pixels_per_column": 128, + "udp_profile_imu": "LEGACY", + "udp_profile_lidar": "RNG15_RFL8_NIR8" + }, + "hostname": "", + "image_rev": "ousteros-image-prod-aries-v2.3.0+20220415163956", + "imu_to_sensor_transform": + [ + 1, + 0, + 0, + 6.253, + 0, + 1, + 0, + -11.775, + 0, + 0, + 1, + 7.645, + 0, + 0, + 0, + 1 + ], + "initialization_id": 5431292, + "json_calibration_version": 4, + "lidar_mode": "1024x10", + "lidar_origin_to_beam_origin_mm": 27.67, + "lidar_to_sensor_transform": + [ + -1, + 0, + 0, + 0, + 0, + -1, + 0, + 0, + 0, + 0, + 1, + 36.18, + 0, + 0, + 0, + 1 + ], + "prod_line": "OS-0-128", + "prod_pn": "840-103574-06", + "prod_sn": "122150000150", + "proto_rev": "", + "status": "RUNNING", + "udp_port_imu": 7503, + "udp_port_lidar": 7502 +} \ No newline at end of file diff --git a/tests/pcaps/empty_pcap.json b/tests/pcaps/empty_pcap.json new file mode 100644 index 00000000..93dca316 --- /dev/null +++ b/tests/pcaps/empty_pcap.json @@ -0,0 +1,554 @@ +{ + "beam_intrinsics": + { + "beam_altitude_angles": + [ + 10.72, + 10.56, + 10.39, + 10.21, + 10.06, + 9.9, + 9.74, + 9.56, + 9.39, + 9.23, + 9.07, + 8.9, + 8.72, + 8.55, + 8.39, + 8.23, + 8.05, + 7.87, + 7.71, + 7.54, + 7.37, + 7.18, + 7.03, + 6.87, + 6.68, + 6.52, + 6.35, + 6.18, + 6, + 5.83, + 5.66, + 5.49, + 5.31, + 5.14, + 4.98, + 4.8, + 4.61, + 4.46, + 4.27, + 4.12, + 3.93, + 3.76, + 3.59, + 3.43, + 3.24, + 3.08, + 2.9, + 2.74, + 2.56, + 2.37, + 2.22, + 2.04, + 1.85, + 1.68, + 1.52, + 1.35, + 1.16, + 0.99, + 0.83, + 0.67, + 0.48, + 0.3, + 0.15, + -0.01, + -0.21, + -0.39, + -0.56, + -0.72, + -0.92, + -1.09, + -1.26, + -1.43, + -1.6, + -1.76, + -1.95, + -2.12, + -2.3, + -2.47, + -2.64, + -2.82, + -3, + -3.15, + -3.33, + -3.5, + -3.68, + -3.85, + -4.02, + -4.21, + -4.38, + -4.55, + -4.73, + -4.89, + -5.06, + -5.24, + -5.41, + -5.58, + -5.75, + -5.94, + -6.1, + 
-6.27, + -6.44, + -6.62, + -6.78, + -6.94, + -7.12, + -7.3, + -7.46, + -7.63, + -7.8, + -7.98, + -8.15, + -8.31, + -8.47, + -8.65, + -8.82, + -8.99, + -9.15, + -9.34, + -9.5, + -9.66, + -9.83, + -10, + -10.17, + -10.32, + -10.49, + -10.67, + -10.84, + -10.99 + ], + "beam_azimuth_angles": + [ + 2.08, + 0.7, + -0.67, + -2.05, + 2.06, + 0.69, + -0.67, + -2.04, + 2.07, + 0.68, + -0.68, + -2.04, + 2.06, + 0.7, + -0.67, + -2.04, + 2.07, + 0.7, + -0.68, + -2.05, + 2.06, + 0.68, + -0.68, + -2.04, + 2.06, + 0.69, + -0.68, + -2.05, + 2.06, + 0.69, + -0.68, + -2.05, + 2.06, + 0.69, + -0.69, + -2.03, + 2.06, + 0.7, + -0.69, + -2.05, + 2.06, + 0.69, + -0.69, + -2.05, + 2.06, + 0.69, + -0.69, + -2.05, + 2.07, + 0.69, + -0.68, + -2.05, + 2.05, + 0.68, + -0.69, + -2.05, + 2.06, + 0.68, + -0.69, + -2.06, + 2.06, + 0.69, + -0.68, + -2.04, + 2.07, + 0.69, + -0.69, + -2.05, + 2.06, + 0.68, + -0.69, + -2.07, + 2.06, + 0.68, + -0.69, + -2.05, + 2.04, + 0.68, + -0.69, + -2.07, + 2.05, + 0.7, + -0.68, + -2.06, + 2.05, + 0.69, + -0.69, + -2.07, + 2.05, + 0.69, + -0.69, + -2.06, + 2.06, + 0.69, + -0.69, + -2.07, + 2.06, + 0.68, + -0.7, + -2.07, + 2.06, + 0.67, + -0.69, + -2.05, + 2.06, + 0.69, + -0.7, + -2.06, + 2.05, + 0.68, + -0.7, + -2.05, + 2.06, + 0.69, + -0.7, + -2.08, + 2.06, + 0.68, + -0.7, + -2.08, + 2.06, + 0.68, + -0.7, + -2.06, + 2.04, + 0.67, + -0.71, + -2.1 + ], + "beam_to_lidar_transform": + [ + 1, + 0, + 0, + 27.397, + 0, + 1, + 0, + 0, + 0, + 0, + 1, + 0, + 0, + 0, + 0, + 1 + ], + "lidar_origin_to_beam_origin_mm": 27.397 + }, + "calibration_status": + { + "reflectivity": + { + "timestamp": "", + "valid": false + } + }, + "client_version": "ouster_client 0.7.3", + "config_params": + { + "azimuth_window": + [ + 0, + 360000 + ], + "columns_per_packet": 16, + "lidar_mode": "512x10", + "multipurpose_io_mode": "OFF", + "nmea_baud_rate": "BAUD_9600", + "nmea_ignore_valid_char": 0, + "nmea_in_polarity": "ACTIVE_HIGH", + "nmea_leap_seconds": 0, + "operating_mode": "NORMAL", + "phase_lock_enable": false, + "phase_lock_offset": 0, + "signal_multiplier": 1, + "sync_pulse_in_polarity": "ACTIVE_HIGH", + "sync_pulse_out_angle": 360, + "sync_pulse_out_frequency": 1, + "sync_pulse_out_polarity": "ACTIVE_HIGH", + "sync_pulse_out_pulse_width": 10, + "timestamp_mode": "TIME_FROM_INTERNAL_OSC", + "udp_dest": "169.254.202.61", + "udp_port_imu": 7503, + "udp_port_lidar": 7502, + "udp_profile_imu": "LEGACY", + "udp_profile_lidar": "RNG19_RFL8_SIG16_NIR16_DUAL" + }, + "imu_intrinsics": + { + "imu_to_sensor_transform": + [ + 1, + 0, + 0, + -6.253, + 0, + 1, + 0, + 11.775, + 0, + 0, + 1, + 11.645, + 0, + 0, + 0, + 1 + ] + }, + "lidar_data_format": + { + "column_window": + [ + 0, + 511 + ], + "columns_per_frame": 512, + "columns_per_packet": 16, + "pixel_shift_by_row": + [ + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3, + 3, + 1, + -1, + -3 + ], + "pixels_per_column": 128, + "udp_profile_imu": "LEGACY", 
+ "udp_profile_lidar": "RNG19_RFL8_SIG16_NIR16_DUAL" + }, + "lidar_intrinsics": + { + "lidar_to_sensor_transform": + [ + -1, + 0, + 0, + 0, + 0, + -1, + 0, + 0, + 0, + 0, + 1, + 78.296, + 0, + 0, + 0, + 1 + ] + }, + "ouster-sdk": + { + "changed_fields": + [ + "ouster-sdk" + ], + "client_version": "ouster_client 0.11.0c5", + "extrinsic": + [ + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0, + 0.0, + 0.0, + 0.0, + 0.0, + 1.0 + ], + "hostname": "", + "output_source": "updated_metadata_string" + }, + "sensor_info": + { + "build_date": "2023-03-14T20:21:40Z", + "build_rev": "v2.5.0-omega.7-14-g58d5def74", + "image_rev": "ousteros-image-prod-aries-v2.5.0-omega.7+20230314201607.staging.ci-git.master.58d5def74b", + "initialization_id": 7109746, + "prod_line": "OS-2-128", + "prod_pn": "840-104704-B", + "prod_sn": "992305000159", + "status": "RUNNING" + } +} \ No newline at end of file diff --git a/tests/pcaps/empty_pcap.pcap b/tests/pcaps/empty_pcap.pcap new file mode 100644 index 00000000..a3243045 Binary files /dev/null and b/tests/pcaps/empty_pcap.pcap differ diff --git a/tests/scan_batcher_test.cpp b/tests/scan_batcher_test.cpp index 0b707a95..9a2f4e8d 100644 --- a/tests/scan_batcher_test.cpp +++ b/tests/scan_batcher_test.cpp @@ -65,8 +65,16 @@ std::vector random_frame(UDPProfileLidar profile, }; ouster::impl::foreach_field(ls, randomise); + auto g = std::mt19937(0xdeadbeef); + auto dinit_id = std::uniform_int_distribution(0, 0xFFFFFF); + auto dserial_no = std::uniform_int_distribution(0, 0xFFFFFFFFFF); + + uint32_t init_id = dinit_id(g); // 24 bits + uint64_t serial_no = dserial_no(g); // 40 bits + auto packets = std::vector{}; - ouster::impl::scan_to_packets(ls, pw, std::back_inserter(packets)); + ouster::impl::scan_to_packets(ls, pw, std::back_inserter(packets), init_id, + serial_no); return packets; } @@ -461,7 +469,7 @@ TEST_P(ScanBatcherTest, scan_batcher_wraparound_test) { auto packet = std::make_unique(); std::memset(packet->buf.data(), 0, packet->buf.size()); packet->host_timestamp = 100; - pw.set_frame_id(packet->buf.data(), 65535); + pw.set_frame_id(packet->buf.data(), pw.max_frame_id); uint16_t m_id = columns_per_frame - columns_per_packet; uint64_t ts = 100; for (size_t icol = 0; icol < columns_per_packet; ++icol) { diff --git a/tests/udp_queue_test.cpp b/tests/udp_queue_test.cpp new file mode 100644 index 00000000..e9170ad0 --- /dev/null +++ b/tests/udp_queue_test.cpp @@ -0,0 +1,490 @@ +/** + * Copyright (c) 2023, Ouster, Inc. + * All rights reserved. 
+ */ + +#include + +#include +#include +#include +#include +#include + +#include "ouster/pcap.h" +#include "ouster/types.h" +#include "ouster/udp_packet_source.h" +#include "util.h" + +// clang-format off +#include "ouster/impl/netcompat.h" +// clang-format on + +using namespace ouster::sensor; +using namespace ouster::sensor::impl; + +TEST(UdpQueueTest, lidar_packet_capacity_test) { + auto lp = LidarPacket(2048); + EXPECT_GE(lp.buf.capacity(), lp.buf.size() + 1); + + // this is a platform sanity check more than anything + uint8_t* ptr = lp.buf.data() + lp.buf.size(); + EXPECT_NO_THROW(*ptr = 0xca); + EXPECT_EQ(*(lp.buf.data() + lp.buf.size()), 0xca); +} + +struct non_default_constructible { + void* ptr; + non_default_constructible() = delete; + non_default_constructible(void* p) : ptr(p) {} +}; + +TEST(UdpQueueTest, ring_buffer_test) { + // should compile + RingBuffer{10, {nullptr}}; + // will not compile + // RingBuffer{10}; + + RingBuffer rb{10, 0}; + + EXPECT_EQ(rb.capacity(), 10); + EXPECT_EQ(rb.size(), 0); + EXPECT_EQ(rb.empty(), true); + EXPECT_EQ(rb.full(), false); + + EXPECT_THROW(rb.pop(), std::underflow_error); + + std::vector writes{}; + std::vector reads{}; + + // write + for (int i = 0; !rb.full(); ++i) { + writes.push_back(i); + rb.back() = i; + EXPECT_NO_THROW(rb.push()); + EXPECT_EQ(rb.size(), writes.size()); + } + EXPECT_EQ(rb.size(), rb.capacity()); + EXPECT_THROW(rb.push(), std::overflow_error); + + // read + while (!rb.empty()) { + reads.push_back(rb.front()); + EXPECT_NO_THROW(rb.pop()); + EXPECT_EQ(rb.size(), writes.size() - reads.size()); + } + EXPECT_EQ(rb.size(), 0); + EXPECT_THROW(rb.pop(), std::underflow_error); + + EXPECT_EQ(writes, reads); +} + +TEST(UdpQueueTest, event_queue_tests) { + auto eq = std::make_shared(); + + auto produce = [](std::shared_ptr eq, int consumers, int runs) { + std::vector events; + while (runs > 0) { + events.clear(); + + for (int c = 0; c < consumers; ++c) { + events.push_back({c, LIDAR_DATA}); + events.push_back({c, IMU_DATA}); + } + + eq->push(events.begin(), events.end()); + std::this_thread::sleep_for(std::chrono::microseconds(100)); + + --runs; + } + + for (int i = 0; i < consumers; ++i) eq->push({-1, client_state::EXIT}); + }; + + auto consume = [](std::shared_ptr eq, EventSet subscriptions, + int* lidar_counts, int* imu_counts) { + while (true) { + auto e = eq->next(subscriptions); + if (e.state == client_state::LIDAR_DATA) (*lidar_counts)++; + if (e.state == client_state::IMU_DATA) (*imu_counts)++; + if (e.state == client_state::EXIT) break; + } + }; + + int n_clients = 10; + int runs = 100; + std::thread prod(produce, eq, n_clients, runs); + + std::vector lidar_counts(n_clients); + std::vector imu_counts(n_clients); + std::vector consumers; + for (int i = 0; i < n_clients; ++i) { + EventSet subs = {{-1, client_state::EXIT}, + {i, client_state::LIDAR_DATA}, + {i, client_state::IMU_DATA}}; + consumers.emplace_back(consume, eq, subs, &lidar_counts[i], + &imu_counts[i]); + } + prod.join(); + for (auto& c : consumers) { + c.join(); + } + + for (auto& c : lidar_counts) { + EXPECT_EQ(c, runs); + } + for (auto& c : imu_counts) { + EXPECT_EQ(c, runs); + } +} + +using str_pair = std::pair; +class UdpQueuePcapTest : public ::testing::TestWithParam {}; + +// clang-format off +INSTANTIATE_TEST_CASE_P( + UdpQueuePcapTests, + UdpQueuePcapTest, + ::testing::Values( + str_pair{"OS-0-128-U1_v2.3.0_1024x10.pcap", + "OS-0-128-U1_v2.3.0_1024x10.json"}, + str_pair{"OS-0-32-U1_v2.2.0_1024x10.pcap", + "OS-0-32-U1_v2.2.0_1024x10.json"}, + 
str_pair{"OS-1-128_767798045_1024x10_20230712_120049.pcap", + "OS-1-128_767798045_1024x10_20230712_120049.json"}, + str_pair{"OS-1-128_v2.3.0_1024x10_lb_n3.pcap", + "OS-1-128_v2.3.0_1024x10.json"}, + str_pair{"OS-2-128-U1_v2.3.0_1024x10.pcap", + "OS-2-128-U1_v2.3.0_1024x10.json"}) +); +// clang-format on + +using namespace ouster::sensor_utils; + +template +struct delayer { + private: + Duration start_; + + public: + delayer() : start_(Duration::zero()) {} + + static Duration now() { + return std::chrono::duration_cast( + Clock::now().time_since_epoch()); + } + + Duration now_from_start() const { return now() - start_; } + + void start() { start_ = now(); } + + void reset() { start_ = Duration::zero(); } + + void delay(Duration next) const { + std::this_thread::sleep_for(next - now_from_start()); + } +}; + +class PcapReplay { + SOCKET sockfd_; + sockaddr_in dest_; + + PcapReader pcap_; + std::chrono::microseconds pcap_start_ts_; + + uint16_t lidar_port_; + uint16_t imu_port_; + uint16_t pcap_lidar_port_; + uint16_t pcap_imu_port_; + + void _send(const uint8_t* data, size_t len, uint16_t port) const { + auto dest = dest_; + dest.sin_port = htons(port); + sendto(sockfd_, (const char*)data, len, 0, (const sockaddr*)&dest, + sizeof(dest)); + } + + uint16_t _reroute_port(int port) const { + if (port == pcap_lidar_port_) + return lidar_port_; + else if (port == pcap_imu_port_) + return imu_port_; + return 0; + } + + public: + PcapReplay(std::string pcap_filename, uint16_t pcap_lidar_port, + uint16_t pcap_imu_port, uint16_t lidar_port, uint16_t imu_port) + : sockfd_(socket(AF_INET, SOCK_DGRAM, 0)), + pcap_(pcap_filename), + lidar_port_(lidar_port), + imu_port_(imu_port), + pcap_lidar_port_(pcap_lidar_port), + pcap_imu_port_(pcap_imu_port) { + if (!impl::socket_valid(sockfd_)) { + throw std::runtime_error("PcapReplay: failed to bind socket"); + } + + memset(&dest_, 0, sizeof(dest_)); + dest_.sin_family = AF_INET; + dest_.sin_addr.s_addr = inet_addr("127.0.0.1"); + + pcap_.next_packet(); + pcap_start_ts_ = pcap_.current_info().timestamp; + } + + ~PcapReplay() {} + + std::chrono::microseconds next_delay() const { + return pcap_.current_info().timestamp - pcap_start_ts_; + } + + int send(delayer delayer) { + _send(pcap_.current_data(), pcap_.current_length(), + _reroute_port(pcap_.current_info().dst_port)); + delayer.delay(next_delay()); + return pcap_.next_packet(); + } + + void reset() { + pcap_.reset(); + pcap_.next_packet(); + } +}; + +void replay(bool* stop, std::vector> replays, + int loop = 1) { + std::vector loops(replays.size(), 0); + std::vector alive(replays.size()); + std::iota(alive.begin(), alive.end(), 0); + + auto all_loops_complete = [&loops, loop] { + return std::all_of(loops.begin(), loops.end(), + [loop](int l) { return l >= loop; }); + }; + + auto next_replay_id = [&replays, &alive] { + auto it = std::min_element( + alive.begin(), alive.end(), [&replays](int id1, int id2) { + return replays[id1]->next_delay() < replays[id2]->next_delay(); + }); + return *it; + }; + + delayer delayer; + + delayer.start(); + + std::vector counts(replays.size(), 0); + + while (!(*stop)) { + if (loop && all_loops_complete()) break; + + int id = next_replay_id(); + + if (!replays[id]->send(delayer)) { + ++loops[id]; + replays[id]->reset(); + if (loop && loops[id] == loop) { + alive.erase(std::remove(alive.begin(), alive.end(), id)); + } + } + } +} + +TEST_P(UdpQueuePcapTest, single_client_test) { + // TODO: reenable once we figure out determinism + GTEST_SKIP(); + + auto data_dir = 
getenvs("DATA_DIR"); + const auto test_params = GetParam(); + auto info = metadata_from_json(data_dir + "/" + std::get<1>(test_params)); + auto pf = packet_format(info); + + std::vector lidar_packets; + std::vector imu_packets; + { // collect packets + PcapReader pcap(data_dir + "/" + std::get<0>(test_params)); + while (pcap.next_packet()) { + if (pcap.current_info().dst_port == info.udp_port_lidar) { + LidarPacket p(pcap.current_length()); + std::memcpy(p.buf.data(), pcap.current_data(), p.buf.size()); + lidar_packets.push_back(std::move(p)); + } + if (pcap.current_info().dst_port == info.udp_port_imu) { + ImuPacket p(pcap.current_length()); + std::memcpy(p.buf.data(), pcap.current_data(), p.buf.size()); + imu_packets.push_back(std::move(p)); + } + } + } + + size_t lidar_buf_size = 640; + size_t imu_buf_size = 100; + + int lidar_port = 50001; + int imu_port = 50002; + + BufferedUDPSource queue(init_client("127.0.0.1", lidar_port, imu_port), + lidar_buf_size, pf.lidar_packet_size, imu_buf_size, + pf.imu_packet_size); + + bool stop = false; + std::vector> replays; + replays.push_back(std::make_shared( + data_dir + "/" + std::get<0>(test_params), info.udp_port_lidar, + info.udp_port_imu, lidar_port, imu_port)); + + std::thread producer(&BufferedUDPSource::produce, &queue); + std::thread sensor_emu(&replay, &stop, replays, 1); + + size_t lidar_packets_recv = 0; + size_t imu_packets_recv = 0; + while (true) { + auto st = queue.pop(1.0); + + if (st == client_state::TIMEOUT) break; + if (st == client_state::EXIT) break; + if (static_cast(st) == Producer::CLIENT_OVERFLOW) break; + + if (st == client_state::LIDAR_DATA) { + Packet& p = queue.packet(st); + EXPECT_EQ(p.buf, lidar_packets[lidar_packets_recv].buf); + ++lidar_packets_recv; + } + if (st == client_state::IMU_DATA) { + Packet& p = queue.packet(st); + EXPECT_EQ(p.buf, imu_packets[imu_packets_recv].buf); + ++imu_packets_recv; + } + + queue.advance(st); + + if (lidar_packets_recv == lidar_packets.size() && + imu_packets_recv == imu_packets.size()) + break; + } + queue.shutdown(); + producer.join(); + sensor_emu.join(); + + EXPECT_EQ(lidar_packets_recv, lidar_packets.size()); + EXPECT_EQ(imu_packets_recv, imu_packets.size()); +} + +TEST(UdpQueueTest, multi_client_test) { + // TODO: reenable once we figure out determinism + GTEST_SKIP(); + + auto data_dir = getenvs("DATA_DIR"); + + std::vector inputs = { + str_pair{"OS-0-128-U1_v2.3.0_1024x10.pcap", + "OS-0-128-U1_v2.3.0_1024x10.json"}, + str_pair{"OS-0-32-U1_v2.2.0_1024x10.pcap", + "OS-0-32-U1_v2.2.0_1024x10.json"}, + str_pair{"OS-1-128_767798045_1024x10_20230712_120049.pcap", + "OS-1-128_767798045_1024x10_20230712_120049.json"}, + str_pair{"OS-1-128_v2.3.0_1024x10_lb_n3.pcap", + "OS-1-128_v2.3.0_1024x10.json"}, + str_pair{"OS-2-128-U1_v2.3.0_1024x10.pcap", + "OS-2-128-U1_v2.3.0_1024x10.json"}}; + + // set up reference + RingBufferMap orig_packets; + for (int i = 0, end = inputs.size(); i < end; ++i) { + auto&& p = inputs[i]; + + PcapReader pcap(data_dir + "/" + p.first); + auto info = metadata_from_json(data_dir + "/" + p.second); + auto&& pf = get_format(info); + + int n_lidar = 0, n_imu = 0; + while (pcap.next_packet()) { + if (pcap.current_info().dst_port == info.udp_port_lidar) ++n_lidar; + if (pcap.current_info().dst_port == info.udp_port_imu) ++n_imu; + } + Event e_lidar{i, client_state::LIDAR_DATA}; + Event e_imu{i, client_state::IMU_DATA}; + orig_packets.allocate(e_lidar, n_lidar + 1, + Packet(pf.lidar_packet_size)); + orig_packets.allocate(e_imu, n_imu + 1, 
Packet(pf.imu_packet_size)); + pcap.reset(); + + while (pcap.next_packet()) { + if (pcap.current_info().dst_port == info.udp_port_lidar) { + std::memcpy(orig_packets.back(e_lidar).buf.data(), + pcap.current_data(), + orig_packets.back(e_lidar).buf.size()); + orig_packets.push(e_lidar); + } + if (pcap.current_info().dst_port == info.udp_port_imu) { + std::memcpy(orig_packets.back(e_imu).buf.data(), + pcap.current_data(), + orig_packets.back(e_imu).buf.size()); + orig_packets.push(e_imu); + } + } + } + + int port = 50000; + + std::vector> replays; + bool replay_stop = false; + Producer producer; + std::vector> subs; + for (auto&& p : inputs) { + int lidar_port = port++; + int imu_port = port++; + auto info = metadata_from_json(data_dir + "/" + p.second); + replays.push_back(std::make_shared( + data_dir + "/" + p.first, info.udp_port_lidar, info.udp_port_imu, + lidar_port, imu_port)); + + auto id = producer.add_client( + init_client("localhost", lidar_port, imu_port), info, 1.f); + + subs.push_back(producer.subscribe( + EventSet{{-1, client_state::EXIT}, + {-1, client_state::CLIENT_ERROR}, + {id, client_state::LIDAR_DATA}, + {id, client_state::IMU_DATA}, + {id, client_state(Producer::CLIENT_OVERFLOW)}})); + } + + auto consumer = [&orig_packets](std::shared_ptr sub) { + while (true) { + auto e = sub->pop(1.0); + auto st = e.state; + + if (st == client_state::TIMEOUT) break; + if (st == client_state::EXIT) break; + if (st == client_state::CLIENT_ERROR) break; + if (static_cast(st) == Producer::CLIENT_OVERFLOW) break; + + if (st == client_state::LIDAR_DATA || + st == client_state::IMU_DATA) { + EXPECT_EQ(sub->packet(e).buf, orig_packets.front(e).buf); + orig_packets.pop(e); + sub->advance(e); + } + } + }; + + std::thread sensor_emu(&replay, &replay_stop, replays, 1); + std::thread producer_thread(&Producer::run, &producer); + std::vector consumers; + for (auto&& sub : subs) consumers.emplace_back(consumer, sub); + + sensor_emu.join(); + std::this_thread::sleep_for(std::chrono::milliseconds(100)); + producer.shutdown(); + producer_thread.join(); + for (auto&& c : consumers) c.join(); + + for (int i = 0, end = inputs.size(); i < end; ++i) { + EXPECT_EQ(orig_packets.size({i, client_state::LIDAR_DATA}), 0); + EXPECT_EQ(orig_packets.size({i, client_state::IMU_DATA}), 0); + } +}
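
The extrinsics handling exercised by ``test_resolve_extrinsics.py`` above can be driven the same way from user code. The sketch below is illustrative only: the recording path and sensor serial number are placeholders, and it uses only the ``resolve_extrinsics`` and ``open_source`` keyword arguments that appear in those tests (resolution succeeds when an ``extrinsic_parameters.json`` sits alongside the data).

    import numpy as np
    from ouster.sdk import open_source
    from ouster.sdk.util import resolve_extrinsics

    # Placeholder inputs; substitute a real recording and its sensor serial numbers.
    pcap_path = "recording.pcap"
    sensor_names = ["122150000150"]

    # Each resolved entry is a (4x4 transform, source file) pair, as asserted
    # in the tests above; an empty list means nothing could be resolved.
    extrinsics = resolve_extrinsics(
        data_path=pcap_path, sensor_names=sensor_names)
    for ext, src in extrinsics:
        print(f"extrinsics from {src}:\n{ext}")

    # open_source can also apply extrinsics directly, either as a 4x4 array ...
    ss = open_source(source_url=pcap_path, sensor_idx=0, extrinsics=np.eye(4))
    # ... or as a path to an extrinsics JSON file:
    # ss = open_source(source_url=pcap_path, sensor_idx=0,
    #                  extrinsics="extrinsic_parameters.json")
    print(ss.metadata.extrinsic)  # reflects the supplied or resolved transform

Whatever is resolved or supplied ends up on ``metadata.extrinsic``, which is the property the tests assert against.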